Posted to commits@storm.apache.org by pt...@apache.org on 2015/11/05 21:40:40 UTC
[01/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Repository: storm
Updated Branches:
refs/heads/jstorm-import [created] dbc1236fb
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/MetricInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/MetricInfo.java b/jstorm-core/src/main/java/backtype/storm/generated/MetricInfo.java
new file mode 100644
index 0000000..2703777
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/MetricInfo.java
@@ -0,0 +1,902 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class MetricInfo implements org.apache.thrift.TBase<MetricInfo, MetricInfo._Fields>, java.io.Serializable, Cloneable, Comparable<MetricInfo> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("MetricInfo");
+
+ private static final org.apache.thrift.protocol.TField BASE_METRIC_FIELD_DESC = new org.apache.thrift.protocol.TField("baseMetric", org.apache.thrift.protocol.TType.MAP, (short)1);
+ private static final org.apache.thrift.protocol.TField INPUT_METRIC_FIELD_DESC = new org.apache.thrift.protocol.TField("inputMetric", org.apache.thrift.protocol.TType.MAP, (short)2);
+ private static final org.apache.thrift.protocol.TField OUTPUT_METRIC_FIELD_DESC = new org.apache.thrift.protocol.TField("outputMetric", org.apache.thrift.protocol.TType.MAP, (short)3);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new MetricInfoStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new MetricInfoTupleSchemeFactory());
+ }
+
+ private Map<String,MetricWindow> baseMetric; // required
+ private Map<String,Map<String,MetricWindow>> inputMetric; // optional
+ private Map<String,Map<String,MetricWindow>> outputMetric; // optional
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ BASE_METRIC((short)1, "baseMetric"),
+ INPUT_METRIC((short)2, "inputMetric"),
+ OUTPUT_METRIC((short)3, "outputMetric");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // BASE_METRIC
+ return BASE_METRIC;
+ case 2: // INPUT_METRIC
+ return INPUT_METRIC;
+ case 3: // OUTPUT_METRIC
+ return OUTPUT_METRIC;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final _Fields optionals[] = {_Fields.INPUT_METRIC,_Fields.OUTPUT_METRIC};
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.BASE_METRIC, new org.apache.thrift.meta_data.FieldMetaData("baseMetric", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, MetricWindow.class))));
+ tmpMap.put(_Fields.INPUT_METRIC, new org.apache.thrift.meta_data.FieldMetaData("inputMetric", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, MetricWindow.class)))));
+ tmpMap.put(_Fields.OUTPUT_METRIC, new org.apache.thrift.meta_data.FieldMetaData("outputMetric", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, MetricWindow.class)))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(MetricInfo.class, metaDataMap);
+ }
+
+ public MetricInfo() {
+ }
+
+ public MetricInfo(
+ Map<String,MetricWindow> baseMetric)
+ {
+ this();
+ this.baseMetric = baseMetric;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public MetricInfo(MetricInfo other) {
+ if (other.is_set_baseMetric()) {
+ Map<String,MetricWindow> __this__baseMetric = new HashMap<String,MetricWindow>(other.baseMetric.size());
+ for (Map.Entry<String, MetricWindow> other_element : other.baseMetric.entrySet()) {
+
+ String other_element_key = other_element.getKey();
+ MetricWindow other_element_value = other_element.getValue();
+
+ String __this__baseMetric_copy_key = other_element_key;
+
+ MetricWindow __this__baseMetric_copy_value = new MetricWindow(other_element_value);
+
+ __this__baseMetric.put(__this__baseMetric_copy_key, __this__baseMetric_copy_value);
+ }
+ this.baseMetric = __this__baseMetric;
+ }
+ if (other.is_set_inputMetric()) {
+ Map<String,Map<String,MetricWindow>> __this__inputMetric = new HashMap<String,Map<String,MetricWindow>>(other.inputMetric.size());
+ for (Map.Entry<String, Map<String,MetricWindow>> other_element : other.inputMetric.entrySet()) {
+
+ String other_element_key = other_element.getKey();
+ Map<String,MetricWindow> other_element_value = other_element.getValue();
+
+ String __this__inputMetric_copy_key = other_element_key;
+
+ Map<String,MetricWindow> __this__inputMetric_copy_value = new HashMap<String,MetricWindow>(other_element_value.size());
+ for (Map.Entry<String, MetricWindow> other_element_value_element : other_element_value.entrySet()) {
+
+ String other_element_value_element_key = other_element_value_element.getKey();
+ MetricWindow other_element_value_element_value = other_element_value_element.getValue();
+
+ String __this__inputMetric_copy_value_copy_key = other_element_value_element_key;
+
+ MetricWindow __this__inputMetric_copy_value_copy_value = new MetricWindow(other_element_value_element_value);
+
+ __this__inputMetric_copy_value.put(__this__inputMetric_copy_value_copy_key, __this__inputMetric_copy_value_copy_value);
+ }
+
+ __this__inputMetric.put(__this__inputMetric_copy_key, __this__inputMetric_copy_value);
+ }
+ this.inputMetric = __this__inputMetric;
+ }
+ if (other.is_set_outputMetric()) {
+ Map<String,Map<String,MetricWindow>> __this__outputMetric = new HashMap<String,Map<String,MetricWindow>>(other.outputMetric.size());
+ for (Map.Entry<String, Map<String,MetricWindow>> other_element : other.outputMetric.entrySet()) {
+
+ String other_element_key = other_element.getKey();
+ Map<String,MetricWindow> other_element_value = other_element.getValue();
+
+ String __this__outputMetric_copy_key = other_element_key;
+
+ Map<String,MetricWindow> __this__outputMetric_copy_value = new HashMap<String,MetricWindow>(other_element_value.size());
+ for (Map.Entry<String, MetricWindow> other_element_value_element : other_element_value.entrySet()) {
+
+ String other_element_value_element_key = other_element_value_element.getKey();
+ MetricWindow other_element_value_element_value = other_element_value_element.getValue();
+
+ String __this__outputMetric_copy_value_copy_key = other_element_value_element_key;
+
+ MetricWindow __this__outputMetric_copy_value_copy_value = new MetricWindow(other_element_value_element_value);
+
+ __this__outputMetric_copy_value.put(__this__outputMetric_copy_value_copy_key, __this__outputMetric_copy_value_copy_value);
+ }
+
+ __this__outputMetric.put(__this__outputMetric_copy_key, __this__outputMetric_copy_value);
+ }
+ this.outputMetric = __this__outputMetric;
+ }
+ }
+
+ public MetricInfo deepCopy() {
+ return new MetricInfo(this);
+ }
+
+ @Override
+ public void clear() {
+ this.baseMetric = null;
+ this.inputMetric = null;
+ this.outputMetric = null;
+ }
+
+ public int get_baseMetric_size() {
+ return (this.baseMetric == null) ? 0 : this.baseMetric.size();
+ }
+
+ public void put_to_baseMetric(String key, MetricWindow val) {
+ if (this.baseMetric == null) {
+ this.baseMetric = new HashMap<String,MetricWindow>();
+ }
+ this.baseMetric.put(key, val);
+ }
+
+ public Map<String,MetricWindow> get_baseMetric() {
+ return this.baseMetric;
+ }
+
+ public void set_baseMetric(Map<String,MetricWindow> baseMetric) {
+ this.baseMetric = baseMetric;
+ }
+
+ public void unset_baseMetric() {
+ this.baseMetric = null;
+ }
+
+ /** Returns true if field baseMetric is set (has been assigned a value) and false otherwise */
+ public boolean is_set_baseMetric() {
+ return this.baseMetric != null;
+ }
+
+ public void set_baseMetric_isSet(boolean value) {
+ if (!value) {
+ this.baseMetric = null;
+ }
+ }
+
+ public int get_inputMetric_size() {
+ return (this.inputMetric == null) ? 0 : this.inputMetric.size();
+ }
+
+ public void put_to_inputMetric(String key, Map<String,MetricWindow> val) {
+ if (this.inputMetric == null) {
+ this.inputMetric = new HashMap<String,Map<String,MetricWindow>>();
+ }
+ this.inputMetric.put(key, val);
+ }
+
+ public Map<String,Map<String,MetricWindow>> get_inputMetric() {
+ return this.inputMetric;
+ }
+
+ public void set_inputMetric(Map<String,Map<String,MetricWindow>> inputMetric) {
+ this.inputMetric = inputMetric;
+ }
+
+ public void unset_inputMetric() {
+ this.inputMetric = null;
+ }
+
+ /** Returns true if field inputMetric is set (has been assigned a value) and false otherwise */
+ public boolean is_set_inputMetric() {
+ return this.inputMetric != null;
+ }
+
+ public void set_inputMetric_isSet(boolean value) {
+ if (!value) {
+ this.inputMetric = null;
+ }
+ }
+
+ public int get_outputMetric_size() {
+ return (this.outputMetric == null) ? 0 : this.outputMetric.size();
+ }
+
+ public void put_to_outputMetric(String key, Map<String,MetricWindow> val) {
+ if (this.outputMetric == null) {
+ this.outputMetric = new HashMap<String,Map<String,MetricWindow>>();
+ }
+ this.outputMetric.put(key, val);
+ }
+
+ public Map<String,Map<String,MetricWindow>> get_outputMetric() {
+ return this.outputMetric;
+ }
+
+ public void set_outputMetric(Map<String,Map<String,MetricWindow>> outputMetric) {
+ this.outputMetric = outputMetric;
+ }
+
+ public void unset_outputMetric() {
+ this.outputMetric = null;
+ }
+
+ /** Returns true if field outputMetric is set (has been assigned a value) and false otherwise */
+ public boolean is_set_outputMetric() {
+ return this.outputMetric != null;
+ }
+
+ public void set_outputMetric_isSet(boolean value) {
+ if (!value) {
+ this.outputMetric = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case BASE_METRIC:
+ if (value == null) {
+ unset_baseMetric();
+ } else {
+ set_baseMetric((Map<String,MetricWindow>)value);
+ }
+ break;
+
+ case INPUT_METRIC:
+ if (value == null) {
+ unset_inputMetric();
+ } else {
+ set_inputMetric((Map<String,Map<String,MetricWindow>>)value);
+ }
+ break;
+
+ case OUTPUT_METRIC:
+ if (value == null) {
+ unset_outputMetric();
+ } else {
+ set_outputMetric((Map<String,Map<String,MetricWindow>>)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case BASE_METRIC:
+ return get_baseMetric();
+
+ case INPUT_METRIC:
+ return get_inputMetric();
+
+ case OUTPUT_METRIC:
+ return get_outputMetric();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case BASE_METRIC:
+ return is_set_baseMetric();
+ case INPUT_METRIC:
+ return is_set_inputMetric();
+ case OUTPUT_METRIC:
+ return is_set_outputMetric();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof MetricInfo)
+ return this.equals((MetricInfo)that);
+ return false;
+ }
+
+ public boolean equals(MetricInfo that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_baseMetric = true && this.is_set_baseMetric();
+ boolean that_present_baseMetric = true && that.is_set_baseMetric();
+ if (this_present_baseMetric || that_present_baseMetric) {
+ if (!(this_present_baseMetric && that_present_baseMetric))
+ return false;
+ if (!this.baseMetric.equals(that.baseMetric))
+ return false;
+ }
+
+ boolean this_present_inputMetric = true && this.is_set_inputMetric();
+ boolean that_present_inputMetric = true && that.is_set_inputMetric();
+ if (this_present_inputMetric || that_present_inputMetric) {
+ if (!(this_present_inputMetric && that_present_inputMetric))
+ return false;
+ if (!this.inputMetric.equals(that.inputMetric))
+ return false;
+ }
+
+ boolean this_present_outputMetric = true && this.is_set_outputMetric();
+ boolean that_present_outputMetric = true && that.is_set_outputMetric();
+ if (this_present_outputMetric || that_present_outputMetric) {
+ if (!(this_present_outputMetric && that_present_outputMetric))
+ return false;
+ if (!this.outputMetric.equals(that.outputMetric))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_baseMetric = true && (is_set_baseMetric());
+ list.add(present_baseMetric);
+ if (present_baseMetric)
+ list.add(baseMetric);
+
+ boolean present_inputMetric = true && (is_set_inputMetric());
+ list.add(present_inputMetric);
+ if (present_inputMetric)
+ list.add(inputMetric);
+
+ boolean present_outputMetric = true && (is_set_outputMetric());
+ list.add(present_outputMetric);
+ if (present_outputMetric)
+ list.add(outputMetric);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(MetricInfo other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_baseMetric()).compareTo(other.is_set_baseMetric());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_baseMetric()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.baseMetric, other.baseMetric);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_inputMetric()).compareTo(other.is_set_inputMetric());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_inputMetric()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.inputMetric, other.inputMetric);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_outputMetric()).compareTo(other.is_set_outputMetric());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_outputMetric()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.outputMetric, other.outputMetric);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("MetricInfo(");
+ boolean first = true;
+
+ sb.append("baseMetric:");
+ if (this.baseMetric == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.baseMetric);
+ }
+ first = false;
+ if (is_set_inputMetric()) {
+ if (!first) sb.append(", ");
+ sb.append("inputMetric:");
+ if (this.inputMetric == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.inputMetric);
+ }
+ first = false;
+ }
+ if (is_set_outputMetric()) {
+ if (!first) sb.append(", ");
+ sb.append("outputMetric:");
+ if (this.outputMetric == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.outputMetric);
+ }
+ first = false;
+ }
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_baseMetric()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'baseMetric' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class MetricInfoStandardSchemeFactory implements SchemeFactory {
+ public MetricInfoStandardScheme getScheme() {
+ return new MetricInfoStandardScheme();
+ }
+ }
+
+ private static class MetricInfoStandardScheme extends StandardScheme<MetricInfo> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, MetricInfo struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // BASE_METRIC
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map116 = iprot.readMapBegin();
+ struct.baseMetric = new HashMap<String,MetricWindow>(2*_map116.size);
+ String _key117;
+ MetricWindow _val118;
+ for (int _i119 = 0; _i119 < _map116.size; ++_i119)
+ {
+ _key117 = iprot.readString();
+ _val118 = new MetricWindow();
+ _val118.read(iprot);
+ struct.baseMetric.put(_key117, _val118);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_baseMetric_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // INPUT_METRIC
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map120 = iprot.readMapBegin();
+ struct.inputMetric = new HashMap<String,Map<String,MetricWindow>>(2*_map120.size);
+ String _key121;
+ Map<String,MetricWindow> _val122;
+ for (int _i123 = 0; _i123 < _map120.size; ++_i123)
+ {
+ _key121 = iprot.readString();
+ {
+ org.apache.thrift.protocol.TMap _map124 = iprot.readMapBegin();
+ _val122 = new HashMap<String,MetricWindow>(2*_map124.size);
+ String _key125;
+ MetricWindow _val126;
+ for (int _i127 = 0; _i127 < _map124.size; ++_i127)
+ {
+ _key125 = iprot.readString();
+ _val126 = new MetricWindow();
+ _val126.read(iprot);
+ _val122.put(_key125, _val126);
+ }
+ iprot.readMapEnd();
+ }
+ struct.inputMetric.put(_key121, _val122);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_inputMetric_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 3: // OUTPUT_METRIC
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map128 = iprot.readMapBegin();
+ struct.outputMetric = new HashMap<String,Map<String,MetricWindow>>(2*_map128.size);
+ String _key129;
+ Map<String,MetricWindow> _val130;
+ for (int _i131 = 0; _i131 < _map128.size; ++_i131)
+ {
+ _key129 = iprot.readString();
+ {
+ org.apache.thrift.protocol.TMap _map132 = iprot.readMapBegin();
+ _val130 = new HashMap<String,MetricWindow>(2*_map132.size);
+ String _key133;
+ MetricWindow _val134;
+ for (int _i135 = 0; _i135 < _map132.size; ++_i135)
+ {
+ _key133 = iprot.readString();
+ _val134 = new MetricWindow();
+ _val134.read(iprot);
+ _val130.put(_key133, _val134);
+ }
+ iprot.readMapEnd();
+ }
+ struct.outputMetric.put(_key129, _val130);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_outputMetric_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, MetricInfo struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.baseMetric != null) {
+ oprot.writeFieldBegin(BASE_METRIC_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.baseMetric.size()));
+ for (Map.Entry<String, MetricWindow> _iter136 : struct.baseMetric.entrySet())
+ {
+ oprot.writeString(_iter136.getKey());
+ _iter136.getValue().write(oprot);
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ if (struct.inputMetric != null) {
+ if (struct.is_set_inputMetric()) {
+ oprot.writeFieldBegin(INPUT_METRIC_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.inputMetric.size()));
+ for (Map.Entry<String, Map<String,MetricWindow>> _iter137 : struct.inputMetric.entrySet())
+ {
+ oprot.writeString(_iter137.getKey());
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, _iter137.getValue().size()));
+ for (Map.Entry<String, MetricWindow> _iter138 : _iter137.getValue().entrySet())
+ {
+ oprot.writeString(_iter138.getKey());
+ _iter138.getValue().write(oprot);
+ }
+ oprot.writeMapEnd();
+ }
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ }
+ if (struct.outputMetric != null) {
+ if (struct.is_set_outputMetric()) {
+ oprot.writeFieldBegin(OUTPUT_METRIC_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, struct.outputMetric.size()));
+ for (Map.Entry<String, Map<String,MetricWindow>> _iter139 : struct.outputMetric.entrySet())
+ {
+ oprot.writeString(_iter139.getKey());
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, _iter139.getValue().size()));
+ for (Map.Entry<String, MetricWindow> _iter140 : _iter139.getValue().entrySet())
+ {
+ oprot.writeString(_iter140.getKey());
+ _iter140.getValue().write(oprot);
+ }
+ oprot.writeMapEnd();
+ }
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class MetricInfoTupleSchemeFactory implements SchemeFactory {
+ public MetricInfoTupleScheme getScheme() {
+ return new MetricInfoTupleScheme();
+ }
+ }
+
+ private static class MetricInfoTupleScheme extends TupleScheme<MetricInfo> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, MetricInfo struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ {
+ oprot.writeI32(struct.baseMetric.size());
+ for (Map.Entry<String, MetricWindow> _iter141 : struct.baseMetric.entrySet())
+ {
+ oprot.writeString(_iter141.getKey());
+ _iter141.getValue().write(oprot);
+ }
+ }
+ BitSet optionals = new BitSet();
+ if (struct.is_set_inputMetric()) {
+ optionals.set(0);
+ }
+ if (struct.is_set_outputMetric()) {
+ optionals.set(1);
+ }
+ oprot.writeBitSet(optionals, 2);
+ if (struct.is_set_inputMetric()) {
+ {
+ oprot.writeI32(struct.inputMetric.size());
+ for (Map.Entry<String, Map<String,MetricWindow>> _iter142 : struct.inputMetric.entrySet())
+ {
+ oprot.writeString(_iter142.getKey());
+ {
+ oprot.writeI32(_iter142.getValue().size());
+ for (Map.Entry<String, MetricWindow> _iter143 : _iter142.getValue().entrySet())
+ {
+ oprot.writeString(_iter143.getKey());
+ _iter143.getValue().write(oprot);
+ }
+ }
+ }
+ }
+ }
+ if (struct.is_set_outputMetric()) {
+ {
+ oprot.writeI32(struct.outputMetric.size());
+ for (Map.Entry<String, Map<String,MetricWindow>> _iter144 : struct.outputMetric.entrySet())
+ {
+ oprot.writeString(_iter144.getKey());
+ {
+ oprot.writeI32(_iter144.getValue().size());
+ for (Map.Entry<String, MetricWindow> _iter145 : _iter144.getValue().entrySet())
+ {
+ oprot.writeString(_iter145.getKey());
+ _iter145.getValue().write(oprot);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, MetricInfo struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ {
+ org.apache.thrift.protocol.TMap _map146 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.baseMetric = new HashMap<String,MetricWindow>(2*_map146.size);
+ String _key147;
+ MetricWindow _val148;
+ for (int _i149 = 0; _i149 < _map146.size; ++_i149)
+ {
+ _key147 = iprot.readString();
+ _val148 = new MetricWindow();
+ _val148.read(iprot);
+ struct.baseMetric.put(_key147, _val148);
+ }
+ }
+ struct.set_baseMetric_isSet(true);
+ BitSet incoming = iprot.readBitSet(2);
+ if (incoming.get(0)) {
+ {
+ org.apache.thrift.protocol.TMap _map150 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32());
+ struct.inputMetric = new HashMap<String,Map<String,MetricWindow>>(2*_map150.size);
+ String _key151;
+ Map<String,MetricWindow> _val152;
+ for (int _i153 = 0; _i153 < _map150.size; ++_i153)
+ {
+ _key151 = iprot.readString();
+ {
+ org.apache.thrift.protocol.TMap _map154 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ _val152 = new HashMap<String,MetricWindow>(2*_map154.size);
+ String _key155;
+ MetricWindow _val156;
+ for (int _i157 = 0; _i157 < _map154.size; ++_i157)
+ {
+ _key155 = iprot.readString();
+ _val156 = new MetricWindow();
+ _val156.read(iprot);
+ _val152.put(_key155, _val156);
+ }
+ }
+ struct.inputMetric.put(_key151, _val152);
+ }
+ }
+ struct.set_inputMetric_isSet(true);
+ }
+ if (incoming.get(1)) {
+ {
+ org.apache.thrift.protocol.TMap _map158 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.MAP, iprot.readI32());
+ struct.outputMetric = new HashMap<String,Map<String,MetricWindow>>(2*_map158.size);
+ String _key159;
+ Map<String,MetricWindow> _val160;
+ for (int _i161 = 0; _i161 < _map158.size; ++_i161)
+ {
+ _key159 = iprot.readString();
+ {
+ org.apache.thrift.protocol.TMap _map162 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ _val160 = new HashMap<String,MetricWindow>(2*_map162.size);
+ String _key163;
+ MetricWindow _val164;
+ for (int _i165 = 0; _i165 < _map162.size; ++_i165)
+ {
+ _key163 = iprot.readString();
+ _val164 = new MetricWindow();
+ _val164.read(iprot);
+ _val160.put(_key163, _val164);
+ }
+ }
+ struct.outputMetric.put(_key159, _val160);
+ }
+ }
+ struct.set_outputMetric_isSet(true);
+ }
+ }
+ }
+
+}
+
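For orientation, the sketch below shows how a caller typically builds and serializes this generated struct with libthrift 0.9.2. It is not part of the commit; MetricWindow's no-argument constructor is assumed from the usual Thrift code-generation conventions, since that class is not shown in this hunk.

import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TCompactProtocol;

import backtype.storm.generated.MetricInfo;
import backtype.storm.generated.MetricWindow;

public class MetricInfoExample {
    public static void main(String[] args) throws TException {
        MetricInfo info = new MetricInfo();
        // baseMetric is the only required field; inputMetric and outputMetric stay unset (optional).
        info.put_to_baseMetric("Emitted", new MetricWindow());
        info.validate(); // would throw TProtocolException if baseMetric were unset

        // Serialize with the compact protocol, the same protocol writeObject() uses above.
        byte[] bytes = new TSerializer(new TCompactProtocol.Factory()).serialize(info);
        System.out.println("Serialized MetricInfo: " + bytes.length + " bytes");
    }
}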
[09/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/list.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/list.java b/jstorm-core/src/main/java/backtype/storm/command/list.java
new file mode 100755
index 0000000..3b4efdb
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/list.java
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+
+import backtype.storm.generated.ClusterSummary;
+import backtype.storm.generated.TopologyInfo;
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * List topology or cluster information
+ *
+ * @author longda
+ *
+ */
+public class list {
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+
+ NimbusClient client = null;
+ try {
+
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ if (args.length > 0 && StringUtils.isBlank(args[0]) == false) {
+ String topologyName = args[0];
+ TopologyInfo info = client.getClient().getTopologyInfoByName(topologyName);
+
+ System.out.println("Successfully get topology info \n" + Utils.toPrettyJsonString(info));
+ } else {
+ ClusterSummary clusterSummary = client.getClient().getClusterInfo();
+
+ System.out.println("Successfully get cluster info \n" + Utils.toPrettyJsonString(clusterSummary));
+ }
+
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/metrics_monitor.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/metrics_monitor.java b/jstorm-core/src/main/java/backtype/storm/command/metrics_monitor.java
new file mode 100755
index 0000000..6607445
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/metrics_monitor.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.util.Map;
+import java.security.InvalidParameterException;
+
+import backtype.storm.generated.MonitorOptions;
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Monitor topology
+ *
+ * @author Basti
+ *
+ */
+public class metrics_monitor {
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length <= 1) {
+ throw new InvalidParameterException("Should input topology name and enable flag");
+ }
+
+ String topologyName = args[0];
+
+ NimbusClient client = null;
+ try {
+
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ boolean isEnable = Boolean.valueOf(args[1]).booleanValue();
+
+ MonitorOptions options = new MonitorOptions();
+ options.set_isEnable(isEnable);
+
+ client.getClient().metricMonitor(topologyName, options);
+
+ String str = (isEnable) ? "enable" : "disable";
+ System.out.println("Successfully submit command to " + str + " the monitor of " + topologyName);
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/rebalance.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/rebalance.java b/jstorm-core/src/main/java/backtype/storm/command/rebalance.java
new file mode 100755
index 0000000..f0cf69f
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/rebalance.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.security.InvalidParameterException;
+import java.util.Map;
+
+import backtype.storm.generated.RebalanceOptions;
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Rebalance topology
+ *
+ * @author longda
+ *
+ */
+public class rebalance {
+ static final String REASSIGN_FLAG = "-r";
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length == 0) {
+ printErrorInfo();
+ return;
+ }
+
+ int argsIndex = 0;
+ String topologyName = null;
+
+ try {
+ RebalanceOptions options = new RebalanceOptions();
+ options.set_reassign(false);
+ options.set_conf(null);
+
+ if (args[argsIndex].equalsIgnoreCase(REASSIGN_FLAG)) {
+ options.set_reassign(true);
+ argsIndex++;
+ if (args.length <= argsIndex) {
+ // Topology name is not set.
+ printErrorInfo();
+ return;
+ } else {
+ topologyName = args[argsIndex];
+ }
+ } else {
+ topologyName = args[argsIndex];
+ }
+
+ argsIndex++;
+ if (args.length > argsIndex) {
+ for (int i = argsIndex; i < args.length; i++) {
+ String arg = args[i];
+ if (arg.endsWith("yaml") || arg.endsWith("prop")) {
+ Map userConf = Utils.loadConf(arg);
+ String jsonConf = Utils.to_json(userConf);
+ options.set_conf(jsonConf);
+ } else {
+ try {
+ int delaySeconds = Integer.parseInt(arg);
+ options.set_wait_secs(delaySeconds);
+ } catch (NumberFormatException e) {
+ System.out.println("Unsupported argument found, arg=" + arg + ". Full args are " + java.util.Arrays.toString(args));
+ printErrorInfo();
+ return;
+ }
+ }
+ }
+ }
+
+ submitRebalance(topologyName, options);
+
+ System.out.println("Successfully submit command rebalance " + topologyName + ", delaySecs=" + options.get_wait_secs() + ", reassignFlag=" + options.is_reassign() + ", newConfiguration=" + options.get_conf());
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static void printErrorInfo() {
+ System.out.println("Error: Invalid parameters!");
+ System.out.println("USAGE: jstorm rebalance [-r] TopologyName [DelayTime] [NewConfig]");
+ }
+
+ public static void submitRebalance(String topologyName, RebalanceOptions options) throws Exception {
+ submitRebalance(topologyName, options, null);
+ }
+
+ public static void submitRebalance(String topologyName, RebalanceOptions options, Map conf) throws Exception {
+ Map stormConf = Utils.readStormConfig();
+ if (conf != null) {
+ stormConf.putAll(conf);
+ }
+
+ NimbusClient client = null;
+ try {
+ client = NimbusClient.getConfiguredClient(stormConf);
+ client.getClient().rebalance(topologyName, options);
+ } catch (Exception e) {
+ throw e;
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
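The public submitRebalance(...) entry point above can also be called programmatically. A minimal sketch, using only the RebalanceOptions setters visible in this diff; the topology name is a placeholder:

import backtype.storm.command.rebalance;
import backtype.storm.generated.RebalanceOptions;

public class RebalanceExample {
    public static void main(String[] args) throws Exception {
        RebalanceOptions options = new RebalanceOptions();
        options.set_reassign(true);  // same effect as the "-r" flag on the command line
        options.set_wait_secs(30);   // delay, in seconds, before the rebalance takes effect

        // Reads the storm config and talks to Nimbus, exactly as main() does above.
        rebalance.submitRebalance("my-topology", options);
    }
}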
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/restart.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/restart.java b/jstorm-core/src/main/java/backtype/storm/command/restart.java
new file mode 100755
index 0000000..ecec9a3
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/restart.java
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStream;
+import java.security.InvalidParameterException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Properties;
+
+import org.yaml.snakeyaml.Yaml;
+
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Restart topology
+ *
+ * @author basti
+ *
+ */
+public class restart {
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length == 0) {
+ throw new InvalidParameterException("Should input topology name");
+ }
+
+ String topologyName = args[0];
+
+ NimbusClient client = null;
+ try {
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ System.out.println("It will take 15 ~ 100 seconds to restart, please wait patiently\n");
+
+ if (args.length == 1) {
+ client.getClient().restart(topologyName, null);
+ } else {
+ Map loadConf = Utils.loadConf(args[1]);
+ String jsonConf = Utils.to_json(loadConf);
+ System.out.println("New configuration:\n" + jsonConf);
+
+ client.getClient().restart(topologyName, jsonConf);
+ }
+
+ System.out.println("Successfully submit command restart " + topologyName);
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/update_config.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/update_config.java b/jstorm-core/src/main/java/backtype/storm/command/update_config.java
new file mode 100644
index 0000000..be78f19
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/update_config.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.security.InvalidParameterException;
+import java.util.Map;
+
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Update user configuration
+ *
+ * @author basti
+ *
+ */
+public class update_config {
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length < 2) {
+ throw new InvalidParameterException(
+ "[USAGE] update_config topologyName config");
+ }
+
+ String topologyName = args[0];
+
+ NimbusClient client = null;
+ try {
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ Map loadConf = Utils.loadConf(args[1]);
+ String jsonConf = Utils.to_json(loadConf);
+ System.out.println("New configuration:\n" + jsonConf);
+
+ client.getClient().updateConf(topologyName, jsonConf);
+
+ System.out.println("Successfully submit command update_conf "
+ + topologyName);
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java b/jstorm-core/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java
new file mode 100755
index 0000000..8653010
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java
@@ -0,0 +1,108 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.coordination;
+
+import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
+import backtype.storm.coordination.CoordinatedBolt.TimeoutCallback;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.FailedException;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.utils.Utils;
+import java.util.HashMap;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class BatchBoltExecutor implements IRichBolt, FinishedCallback, TimeoutCallback {
+ public static Logger LOG = LoggerFactory.getLogger(BatchBoltExecutor.class);
+
+ byte[] _boltSer;
+ Map<Object, IBatchBolt> _openTransactions;
+ Map _conf;
+ TopologyContext _context;
+ BatchOutputCollectorImpl _collector;
+
+ public BatchBoltExecutor(IBatchBolt bolt) {
+ _boltSer = Utils.javaSerialize(bolt);
+ }
+
+ @Override
+ public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
+ _conf = conf;
+ _context = context;
+ _collector = new BatchOutputCollectorImpl(collector);
+ _openTransactions = new HashMap<Object, IBatchBolt>();
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ Object id = input.getValue(0);
+ IBatchBolt bolt = getBatchBolt(id);
+ try {
+ bolt.execute(input);
+ _collector.ack(input);
+ } catch(FailedException e) {
+ LOG.error("Failed to process tuple in batch", e);
+ _collector.fail(input);
+ }
+ }
+
+ @Override
+ public void cleanup() {
+ }
+
+ @Override
+ public void finishedId(Object id) {
+ IBatchBolt bolt = getBatchBolt(id);
+ _openTransactions.remove(id);
+ bolt.finishBatch();
+ }
+
+ @Override
+ public void timeoutId(Object attempt) {
+ _openTransactions.remove(attempt);
+ }
+
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ newTransactionalBolt().declareOutputFields(declarer);
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ return newTransactionalBolt().getComponentConfiguration();
+ }
+
+ private IBatchBolt getBatchBolt(Object id) {
+ IBatchBolt bolt = _openTransactions.get(id);
+ if(bolt==null) {
+ bolt = newTransactionalBolt();
+ bolt.prepare(_conf, _context, _collector, id);
+ _openTransactions.put(id, bolt);
+ }
+ return bolt;
+ }
+
+ private IBatchBolt newTransactionalBolt() {
+ return Utils.javaDeserialize(_boltSer, IBatchBolt.class);
+ }
+}
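BatchBoltExecutor keeps one IBatchBolt per batch id and drives it through prepare, execute, and finishBatch. A minimal batch bolt compatible with that contract might look like the sketch below; the stream and field names are illustrative assumptions.

import java.util.Map;

import backtype.storm.coordination.BatchOutputCollector;
import backtype.storm.coordination.IBatchBolt;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class CountBatchBolt implements IBatchBolt {
    private BatchOutputCollector _collector;
    private Object _id;
    private int _count = 0;

    @Override
    public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, Object id) {
        _collector = collector;
        _id = id; // the batch id that BatchBoltExecutor keys its _openTransactions map on
    }

    @Override
    public void execute(Tuple tuple) {
        _count++; // accumulate state for the current batch only
    }

    @Override
    public void finishBatch() {
        // Called once per batch via BatchBoltExecutor.finishedId(); emit the aggregate.
        _collector.emit(new Values(_id, _count));
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("id", "count"));
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}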
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollector.java b/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollector.java
new file mode 100755
index 0000000..f5f3457
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollector.java
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.coordination;
+
+import backtype.storm.utils.Utils;
+import java.util.List;
+
+public abstract class BatchOutputCollector {
+
+ /**
+ * Emits a tuple to the default output stream.
+ */
+ public List<Integer> emit(List<Object> tuple) {
+ return emit(Utils.DEFAULT_STREAM_ID, tuple);
+ }
+
+ public abstract List<Integer> emit(String streamId, List<Object> tuple);
+
+ /**
+ * Emits a tuple to the specified task on the default output stream. This output
+ * stream must have been declared as a direct stream, and the specified task must
+ * use a direct grouping on this stream to receive the message.
+ */
+ public void emitDirect(int taskId, List<Object> tuple) {
+ emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple);
+ }
+
+ public abstract void emitDirect(int taskId, String streamId, List<Object> tuple);
+
+ public abstract void reportError(Throwable error);
+}
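The direct-stream requirement in the emitDirect javadoc above involves three pieces: the emitter declares the stream as direct, the emit call names the target task, and the consumer subscribes with a direct grouping. A minimal sketch, with illustrative component ids and stream names:

import backtype.storm.coordination.BatchOutputCollector;
import backtype.storm.topology.IRichBolt;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

class DirectStreamSketch {
    // 1. The emitting component declares the stream as direct (second argument = true).
    void declareOutputs(OutputFieldsDeclarer declarer) {
        declarer.declareStream("direct-stream", true, new Fields("value"));
    }

    // 2. At emit time the producer picks the consuming task explicitly.
    void emitTo(BatchOutputCollector collector, int taskId) {
        collector.emitDirect(taskId, "direct-stream", new Values(42));
    }

    // 3. The consumer subscribes with a direct grouping on that stream.
    void wire(TopologyBuilder builder, IRichBolt consumer) {
        builder.setBolt("consumer", consumer).directGrouping("emitter", "direct-stream");
    }
}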
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java b/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java
new file mode 100755
index 0000000..cae7560
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.coordination;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.tuple.Tuple;
+import java.util.List;
+
+public class BatchOutputCollectorImpl extends BatchOutputCollector {
+ OutputCollector _collector;
+
+ public BatchOutputCollectorImpl(OutputCollector collector) {
+ _collector = collector;
+ }
+
+ @Override
+ public List<Integer> emit(String streamId, List<Object> tuple) {
+ return _collector.emit(streamId, tuple);
+ }
+
+ @Override
+ public void emitDirect(int taskId, String streamId, List<Object> tuple) {
+ _collector.emitDirect(taskId, streamId, tuple);
+ }
+
+ @Override
+ public void reportError(Throwable error) {
+ _collector.reportError(error);
+ }
+
+ public void ack(Tuple tup) {
+ _collector.ack(tup);
+ }
+
+ public void fail(Tuple tup) {
+ _collector.fail(tup);
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java b/jstorm-core/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java
new file mode 100755
index 0000000..2a77f3b
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java
@@ -0,0 +1,479 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.coordination;
+
+import backtype.storm.Constants;
+import backtype.storm.coordination.CoordinatedBolt.SourceArgs;
+import backtype.storm.generated.GlobalStreamId;
+import backtype.storm.generated.Grouping;
+import backtype.storm.grouping.CustomStreamGrouping;
+import backtype.storm.grouping.PartialKeyGrouping;
+import backtype.storm.topology.BaseConfigurationDeclarer;
+import backtype.storm.topology.BasicBoltExecutor;
+import backtype.storm.topology.BoltDeclarer;
+import backtype.storm.topology.IBasicBolt;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.InputDeclarer;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.tuple.Fields;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+public class BatchSubtopologyBuilder {
+ Map<String, Component> _bolts = new HashMap<String, Component>();
+ Component _masterBolt;
+ String _masterId;
+
+ public BatchSubtopologyBuilder(String masterBoltId, IBasicBolt masterBolt, Number boltParallelism) {
+ Integer p = boltParallelism == null ? null : boltParallelism.intValue();
+ _masterBolt = new Component(new BasicBoltExecutor(masterBolt), p);
+ _masterId = masterBoltId;
+ }
+
+ public BatchSubtopologyBuilder(String masterBoltId, IBasicBolt masterBolt) {
+ this(masterBoltId, masterBolt, null);
+ }
+
+ public BoltDeclarer getMasterDeclarer() {
+ return new BoltDeclarerImpl(_masterBolt);
+ }
+
+ public BoltDeclarer setBolt(String id, IBatchBolt bolt) {
+ return setBolt(id, bolt, null);
+ }
+
+ public BoltDeclarer setBolt(String id, IBatchBolt bolt, Number parallelism) {
+ return setBolt(id, new BatchBoltExecutor(bolt), parallelism);
+ }
+
+ public BoltDeclarer setBolt(String id, IBasicBolt bolt) {
+ return setBolt(id, bolt, null);
+ }
+
+ public BoltDeclarer setBolt(String id, IBasicBolt bolt, Number parallelism) {
+ return setBolt(id, new BasicBoltExecutor(bolt), parallelism);
+ }
+
+ private BoltDeclarer setBolt(String id, IRichBolt bolt, Number parallelism) {
+ Integer p = null;
+ if(parallelism!=null) p = parallelism.intValue();
+ Component component = new Component(bolt, p);
+ _bolts.put(id, component);
+ return new BoltDeclarerImpl(component);
+ }
+
+ public void extendTopology(TopologyBuilder builder) {
+ BoltDeclarer declarer = builder.setBolt(_masterId, new CoordinatedBolt(_masterBolt.bolt), _masterBolt.parallelism);
+ for(InputDeclaration decl: _masterBolt.declarations) {
+ decl.declare(declarer);
+ }
+ for(Map conf: _masterBolt.componentConfs) {
+ declarer.addConfigurations(conf);
+ }
+ for(String id: _bolts.keySet()) {
+ Component component = _bolts.get(id);
+ Map<String, SourceArgs> coordinatedArgs = new HashMap<String, SourceArgs>();
+ for(String c: componentBoltSubscriptions(component)) {
+ SourceArgs source;
+ if(c.equals(_masterId)) {
+ source = SourceArgs.single();
+ } else {
+ source = SourceArgs.all();
+ }
+ coordinatedArgs.put(c, source);
+ }
+
+
+ BoltDeclarer input = builder.setBolt(id,
+ new CoordinatedBolt(component.bolt,
+ coordinatedArgs,
+ null),
+ component.parallelism);
+ for(Map conf: component.componentConfs) {
+ input.addConfigurations(conf);
+ }
+ for(String c: componentBoltSubscriptions(component)) {
+ input.directGrouping(c, Constants.COORDINATED_STREAM_ID);
+ }
+ for(InputDeclaration d: component.declarations) {
+ d.declare(input);
+ }
+ }
+ }
+
+ private Set<String> componentBoltSubscriptions(Component component) {
+ Set<String> ret = new HashSet<String>();
+ for(InputDeclaration d: component.declarations) {
+ ret.add(d.getComponent());
+ }
+ return ret;
+ }
+
+ private static class Component {
+ public IRichBolt bolt;
+ public Integer parallelism;
+ public List<InputDeclaration> declarations = new ArrayList<InputDeclaration>();
+ public List<Map> componentConfs = new ArrayList<Map>();
+
+ public Component(IRichBolt bolt, Integer parallelism) {
+ this.bolt = bolt;
+ this.parallelism = parallelism;
+ }
+ }
+
+ private static interface InputDeclaration {
+ void declare(InputDeclarer declarer);
+ String getComponent();
+ }
+
+ private class BoltDeclarerImpl extends BaseConfigurationDeclarer<BoltDeclarer> implements BoltDeclarer {
+ Component _component;
+
+ public BoltDeclarerImpl(Component component) {
+ _component = component;
+ }
+
+ @Override
+ public BoltDeclarer fieldsGrouping(final String component, final Fields fields) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.fieldsGrouping(component, fields);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer fieldsGrouping(final String component, final String streamId, final Fields fields) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.fieldsGrouping(component, streamId, fields);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer globalGrouping(final String component) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.globalGrouping(component);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer globalGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.globalGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer shuffleGrouping(final String component) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.shuffleGrouping(component);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer shuffleGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.shuffleGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer localOrShuffleGrouping(final String component) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.localOrShuffleGrouping(component);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer localOrShuffleGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.localOrShuffleGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer localFirstGrouping(final String componentId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.localFirstGrouping(componentId);
+ }
+
+ @Override
+ public String getComponent() {
+ return componentId;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer localFirstGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.localFirstGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer noneGrouping(final String component) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.noneGrouping(component);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer noneGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.noneGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer allGrouping(final String component) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.allGrouping(component);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer allGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.allGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer directGrouping(final String component) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.directGrouping(component);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer directGrouping(final String component, final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.directGrouping(component, streamId);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer partialKeyGrouping(String componentId, Fields fields) {
+ return customGrouping(componentId, new PartialKeyGrouping(fields));
+ }
+
+ @Override
+ public BoltDeclarer partialKeyGrouping(String componentId, String streamId, Fields fields) {
+ return customGrouping(componentId, streamId, new PartialKeyGrouping(fields));
+ }
+
+ @Override
+ public BoltDeclarer customGrouping(final String component, final CustomStreamGrouping grouping) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.customGrouping(component, grouping);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer customGrouping(final String component, final String streamId, final CustomStreamGrouping grouping) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.customGrouping(component, streamId, grouping);
+ }
+
+ @Override
+ public String getComponent() {
+ return component;
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public BoltDeclarer grouping(final GlobalStreamId stream, final Grouping grouping) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(InputDeclarer declarer) {
+ declarer.grouping(stream, grouping);
+ }
+
+ @Override
+ public String getComponent() {
+ return stream.get_componentId();
+ }
+ });
+ return this;
+ }
+
+ private void addDeclaration(InputDeclaration declaration) {
+ _component.declarations.add(declaration);
+ }
+
+ @Override
+ public BoltDeclarer addConfigurations(Map conf) {
+ _component.componentConfs.add(conf);
+ return this;
+ }
+ }
+}
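
For reference, a minimal usage sketch of the BatchSubtopologyBuilder defined above. MySpout, MyMasterBolt and MyWordCountBolt are hypothetical components (an IRichSpout, an IBasicBolt and an IBatchBolt respectively) and are not part of this commit; only the builder calls themselves come from the class above.

    import backtype.storm.coordination.BatchSubtopologyBuilder;
    import backtype.storm.generated.StormTopology;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.tuple.Fields;

    public class BatchSubtopologyExample {
        public static StormTopology build() {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("spout", new MySpout());                    // hypothetical IRichSpout

            // The master bolt sees the raw stream and decides where batches begin and end.
            BatchSubtopologyBuilder batch = new BatchSubtopologyBuilder("master", new MyMasterBolt());
            batch.getMasterDeclarer().shuffleGrouping("spout");

            // Batch bolts are declared on the subtopology builder and wired to the master bolt.
            batch.setBolt("count", new MyWordCountBolt(), 4)
                 .fieldsGrouping("master", new Fields("word"));

            // extendTopology wraps every bolt in a CoordinatedBolt and adds the coordination streams.
            batch.extendTopology(builder);
            return builder.createTopology();
        }
    }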
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/coordination/CoordinatedBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/coordination/CoordinatedBolt.java b/jstorm-core/src/main/java/backtype/storm/coordination/CoordinatedBolt.java
new file mode 100755
index 0000000..6f337a6
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/coordination/CoordinatedBolt.java
@@ -0,0 +1,382 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.coordination;
+
+import backtype.storm.topology.FailedException;
+import java.util.Map.Entry;
+import backtype.storm.tuple.Values;
+import backtype.storm.generated.GlobalStreamId;
+import java.util.Collection;
+import backtype.storm.Constants;
+import backtype.storm.generated.Grouping;
+import backtype.storm.task.IOutputCollector;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.utils.TimeCacheMap;
+import backtype.storm.utils.Utils;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import static backtype.storm.utils.Utils.get;
+
+/**
+ * Coordination requires the request ids to be globally unique for a while, so that the
+ * coordination state is not confused by retries.
+ */
+public class CoordinatedBolt implements IRichBolt {
+ public static Logger LOG = LoggerFactory.getLogger(CoordinatedBolt.class);
+
+ public static interface FinishedCallback {
+ void finishedId(Object id);
+ }
+
+ public static interface TimeoutCallback {
+ void timeoutId(Object id);
+ }
+
+
+ public static class SourceArgs implements Serializable {
+ public boolean singleCount;
+
+ protected SourceArgs(boolean singleCount) {
+ this.singleCount = singleCount;
+ }
+
+ public static SourceArgs single() {
+ return new SourceArgs(true);
+ }
+
+ public static SourceArgs all() {
+ return new SourceArgs(false);
+ }
+
+ @Override
+ public String toString() {
+ return "<Single: " + singleCount + ">";
+ }
+ }
+
+ public class CoordinatedOutputCollector implements IOutputCollector {
+ IOutputCollector _delegate;
+
+ public CoordinatedOutputCollector(IOutputCollector delegate) {
+ _delegate = delegate;
+ }
+
+ public List<Integer> emit(String stream, Collection<Tuple> anchors, List<Object> tuple) {
+ List<Integer> tasks = _delegate.emit(stream, anchors, tuple);
+ updateTaskCounts(tuple.get(0), tasks);
+ return tasks;
+ }
+
+ public void emitDirect(int task, String stream, Collection<Tuple> anchors, List<Object> tuple) {
+ updateTaskCounts(tuple.get(0), Arrays.asList(task));
+ _delegate.emitDirect(task, stream, anchors, tuple);
+ }
+
+ public void ack(Tuple tuple) {
+ Object id = tuple.getValue(0);
+ synchronized(_tracked) {
+ TrackingInfo track = _tracked.get(id);
+ if (track != null)
+ track.receivedTuples++;
+ }
+ boolean failed = checkFinishId(tuple, TupleType.REGULAR);
+ if(failed) {
+ _delegate.fail(tuple);
+ } else {
+ _delegate.ack(tuple);
+ }
+ }
+
+ public void fail(Tuple tuple) {
+ Object id = tuple.getValue(0);
+ synchronized(_tracked) {
+ TrackingInfo track = _tracked.get(id);
+ if (track != null)
+ track.failed = true;
+ }
+ checkFinishId(tuple, TupleType.REGULAR);
+ _delegate.fail(tuple);
+ }
+
+ public void reportError(Throwable error) {
+ _delegate.reportError(error);
+ }
+
+
+ private void updateTaskCounts(Object id, List<Integer> tasks) {
+ synchronized(_tracked) {
+ TrackingInfo track = _tracked.get(id);
+ if (track != null) {
+ Map<Integer, Integer> taskEmittedTuples = track.taskEmittedTuples;
+ for(Integer task: tasks) {
+ int newCount = get(taskEmittedTuples, task, 0) + 1;
+ taskEmittedTuples.put(task, newCount);
+ }
+ }
+ }
+ }
+ }
+
+ private Map<String, SourceArgs> _sourceArgs;
+ private IdStreamSpec _idStreamSpec;
+ private IRichBolt _delegate;
+ private Integer _numSourceReports;
+ private List<Integer> _countOutTasks = new ArrayList<Integer>();
+ private OutputCollector _collector;
+ private TimeCacheMap<Object, TrackingInfo> _tracked;
+
+ public static class TrackingInfo {
+ int reportCount = 0;
+ int expectedTupleCount = 0;
+ int receivedTuples = 0;
+ boolean failed = false;
+ Map<Integer, Integer> taskEmittedTuples = new HashMap<Integer, Integer>();
+ boolean receivedId = false;
+ boolean finished = false;
+ List<Tuple> ackTuples = new ArrayList<Tuple>();
+
+ @Override
+ public String toString() {
+ return "reportCount: " + reportCount + "\n" +
+ "expectedTupleCount: " + expectedTupleCount + "\n" +
+ "receivedTuples: " + receivedTuples + "\n" +
+ "failed: " + failed + "\n" +
+ taskEmittedTuples.toString();
+ }
+ }
+
+
+ public static class IdStreamSpec implements Serializable {
+ GlobalStreamId _id;
+
+ public GlobalStreamId getGlobalStreamId() {
+ return _id;
+ }
+
+ public static IdStreamSpec makeDetectSpec(String component, String stream) {
+ return new IdStreamSpec(component, stream);
+ }
+
+ protected IdStreamSpec(String component, String stream) {
+ _id = new GlobalStreamId(component, stream);
+ }
+ }
+
+ public CoordinatedBolt(IRichBolt delegate) {
+ this(delegate, null, null);
+ }
+
+ public CoordinatedBolt(IRichBolt delegate, String sourceComponent, SourceArgs sourceArgs, IdStreamSpec idStreamSpec) {
+ this(delegate, singleSourceArgs(sourceComponent, sourceArgs), idStreamSpec);
+ }
+
+ public CoordinatedBolt(IRichBolt delegate, Map<String, SourceArgs> sourceArgs, IdStreamSpec idStreamSpec) {
+ _sourceArgs = sourceArgs;
+ if(_sourceArgs==null) _sourceArgs = new HashMap<String, SourceArgs>();
+ _delegate = delegate;
+ _idStreamSpec = idStreamSpec;
+ }
+
+ public void prepare(Map config, TopologyContext context, OutputCollector collector) {
+ TimeCacheMap.ExpiredCallback<Object, TrackingInfo> callback = null;
+ if(_delegate instanceof TimeoutCallback) {
+ callback = new TimeoutItems();
+ }
+ _tracked = new TimeCacheMap<Object, TrackingInfo>(context.maxTopologyMessageTimeout(), callback);
+ _collector = collector;
+ _delegate.prepare(config, context, new OutputCollector(new CoordinatedOutputCollector(collector)));
+ for(String component: Utils.get(context.getThisTargets(),
+ Constants.COORDINATED_STREAM_ID,
+ new HashMap<String, Grouping>())
+ .keySet()) {
+ for(Integer task: context.getComponentTasks(component)) {
+ _countOutTasks.add(task);
+ }
+ }
+ if(!_sourceArgs.isEmpty()) {
+ _numSourceReports = 0;
+ for(Entry<String, SourceArgs> entry: _sourceArgs.entrySet()) {
+ if(entry.getValue().singleCount) {
+ _numSourceReports+=1;
+ } else {
+ _numSourceReports+=context.getComponentTasks(entry.getKey()).size();
+ }
+ }
+ }
+ }
+
+ private boolean checkFinishId(Tuple tup, TupleType type) {
+ Object id = tup.getValue(0);
+ boolean failed = false;
+
+ synchronized(_tracked) {
+ TrackingInfo track = _tracked.get(id);
+ try {
+ if(track!=null) {
+ boolean delayed = false;
+ if(_idStreamSpec==null && type == TupleType.COORD || _idStreamSpec!=null && type==TupleType.ID) {
+ track.ackTuples.add(tup);
+ delayed = true;
+ }
+ if(track.failed) {
+ failed = true;
+ for(Tuple t: track.ackTuples) {
+ _collector.fail(t);
+ }
+ _tracked.remove(id);
+ } else if(track.receivedId
+ && (_sourceArgs.isEmpty() ||
+ track.reportCount==_numSourceReports &&
+ track.expectedTupleCount == track.receivedTuples)){
+ if(_delegate instanceof FinishedCallback) {
+ ((FinishedCallback)_delegate).finishedId(id);
+ }
+ if(!(_sourceArgs.isEmpty() || type!=TupleType.REGULAR)) {
+ throw new IllegalStateException("Coordination condition met on a non-coordinating tuple. Should be impossible");
+ }
+ Iterator<Integer> outTasks = _countOutTasks.iterator();
+ while(outTasks.hasNext()) {
+ int task = outTasks.next();
+ int numTuples = get(track.taskEmittedTuples, task, 0);
+ _collector.emitDirect(task, Constants.COORDINATED_STREAM_ID, tup, new Values(id, numTuples));
+ }
+ for(Tuple t: track.ackTuples) {
+ _collector.ack(t);
+ }
+ track.finished = true;
+ _tracked.remove(id);
+ }
+ if(!delayed && type!=TupleType.REGULAR) {
+ if(track.failed) {
+ _collector.fail(tup);
+ } else {
+ _collector.ack(tup);
+ }
+ }
+ } else {
+ if(type!=TupleType.REGULAR) _collector.fail(tup);
+ }
+ } catch(FailedException e) {
+ LOG.error("Failed to finish batch", e);
+ for(Tuple t: track.ackTuples) {
+ _collector.fail(t);
+ }
+ _tracked.remove(id);
+ failed = true;
+ }
+ }
+ return failed;
+ }
+
+ public void execute(Tuple tuple) {
+ Object id = tuple.getValue(0);
+ TrackingInfo track;
+ TupleType type = getTupleType(tuple);
+ synchronized(_tracked) {
+ track = _tracked.get(id);
+ if(track==null) {
+ track = new TrackingInfo();
+ if(_idStreamSpec==null) track.receivedId = true;
+ _tracked.put(id, track);
+ }
+ }
+
+ if(type==TupleType.ID) {
+ synchronized(_tracked) {
+ track.receivedId = true;
+ }
+ checkFinishId(tuple, type);
+ } else if(type==TupleType.COORD) {
+ int count = (Integer) tuple.getValue(1);
+ synchronized(_tracked) {
+ track.reportCount++;
+ track.expectedTupleCount+=count;
+ }
+ checkFinishId(tuple, type);
+ } else {
+ synchronized(_tracked) {
+ _delegate.execute(tuple);
+ }
+ }
+ }
+
+ public void cleanup() {
+ _delegate.cleanup();
+ _tracked.cleanup();
+ }
+
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ _delegate.declareOutputFields(declarer);
+ declarer.declareStream(Constants.COORDINATED_STREAM_ID, true, new Fields("id", "count"));
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ return _delegate.getComponentConfiguration();
+ }
+
+ private static Map<String, SourceArgs> singleSourceArgs(String sourceComponent, SourceArgs sourceArgs) {
+ Map<String, SourceArgs> ret = new HashMap<String, SourceArgs>();
+ ret.put(sourceComponent, sourceArgs);
+ return ret;
+ }
+
+ private class TimeoutItems implements TimeCacheMap.ExpiredCallback<Object, TrackingInfo> {
+ @Override
+ public void expire(Object id, TrackingInfo val) {
+ synchronized(_tracked) {
+ // the combination of the lock and the finished flag ensures that
+ // an id is never timed out if it has already finished
+ val.failed = true;
+ if(!val.finished) {
+ ((TimeoutCallback) _delegate).timeoutId(id);
+ }
+ }
+ }
+ }
+
+ private TupleType getTupleType(Tuple tuple) {
+ if(_idStreamSpec!=null
+ && tuple.getSourceGlobalStreamid().equals(_idStreamSpec._id)) {
+ return TupleType.ID;
+ } else if(!_sourceArgs.isEmpty()
+ && tuple.getSourceStreamId().equals(Constants.COORDINATED_STREAM_ID)) {
+ return TupleType.COORD;
+ } else {
+ return TupleType.REGULAR;
+ }
+ }
+
+ static enum TupleType {
+ REGULAR,
+ ID,
+ COORD
+ }
+}
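
As a rough illustration of the FinishedCallback hook declared above, a hedged sketch of a delegate bolt that accumulates per-request state and is told by CoordinatedBolt when every expected tuple for a request id has arrived. PerRequestCounter is hypothetical and not part of this commit; the convention that field 0 carries the request id matches the code above.

    import java.util.HashMap;
    import java.util.Map;

    import backtype.storm.coordination.CoordinatedBolt;
    import backtype.storm.task.OutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichBolt;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.tuple.Values;

    public class PerRequestCounter extends BaseRichBolt implements CoordinatedBolt.FinishedCallback {
        private OutputCollector _collector;
        private Map<Object, Integer> _counts;

        public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
            _collector = collector;
            _counts = new HashMap<Object, Integer>();
        }

        public void execute(Tuple tuple) {
            Object id = tuple.getValue(0);          // field 0 carries the request/batch id
            Integer cur = _counts.get(id);
            _counts.put(id, cur == null ? 1 : cur + 1);
            _collector.ack(tuple);
        }

        @Override
        public void finishedId(Object id) {
            // invoked by CoordinatedBolt once all expected tuples for this id have been processed
            Integer total = _counts.remove(id);
            _collector.emit(new Values(id, total == null ? 0 : total));
        }

        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("id", "count"));
        }
    }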
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/coordination/IBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/coordination/IBatchBolt.java b/jstorm-core/src/main/java/backtype/storm/coordination/IBatchBolt.java
new file mode 100755
index 0000000..ee5d9bd
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/coordination/IBatchBolt.java
@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.coordination;
+
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IComponent;
+import backtype.storm.tuple.Tuple;
+import java.io.Serializable;
+import java.util.Map;
+
+public interface IBatchBolt<T> extends Serializable, IComponent {
+ void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, T id);
+ void execute(Tuple tuple);
+ void finishBatch();
+}
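
A hedged sketch of implementing the IBatchBolt interface above: the bolt keeps per-batch state (a fresh instance is created per batch id) and emits a single summary tuple in finishBatch(). CountBatchBolt is hypothetical; only the interface methods come from this commit.

    import java.util.Map;

    import backtype.storm.coordination.BatchOutputCollector;
    import backtype.storm.coordination.IBatchBolt;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.tuple.Values;

    public class CountBatchBolt implements IBatchBolt<Object> {
        private BatchOutputCollector _collector;
        private Object _id;
        private int _count = 0;

        public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, Object id) {
            _collector = collector;
            _id = id;                                  // the batch/request id this instance belongs to
        }

        public void execute(Tuple tuple) {
            _count++;                                  // per-batch state only
        }

        public void finishBatch() {
            _collector.emit(new Values(_id, _count));  // one summary tuple per batch
        }

        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("id", "count"));
        }

        public Map<String, Object> getComponentConfiguration() {
            return null;
        }
    }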
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/daemon/Shutdownable.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/daemon/Shutdownable.java b/jstorm-core/src/main/java/backtype/storm/daemon/Shutdownable.java
new file mode 100755
index 0000000..b1d8ddf
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/daemon/Shutdownable.java
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.daemon;
+
+public interface Shutdownable {
+ public void shutdown();
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java b/jstorm-core/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java
new file mode 100755
index 0000000..624db3e
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+import backtype.storm.generated.DRPCRequest;
+import backtype.storm.generated.DistributedRPCInvocations;
+import backtype.storm.generated.AuthorizationException;
+import backtype.storm.security.auth.ThriftClient;
+import backtype.storm.security.auth.ThriftConnectionType;
+import org.apache.thrift.transport.TTransportException;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DRPCInvocationsClient extends ThriftClient implements DistributedRPCInvocations.Iface {
+ public static Logger LOG = LoggerFactory.getLogger(DRPCInvocationsClient.class);
+ private final AtomicReference<DistributedRPCInvocations.Client> client =
+ new AtomicReference<DistributedRPCInvocations.Client>();
+ private String host;
+ private int port;
+
+ public DRPCInvocationsClient(Map conf, String host, int port) throws TTransportException {
+ super(conf, ThriftConnectionType.DRPC_INVOCATIONS, host, port, null);
+ this.host = host;
+ this.port = port;
+ client.set(new DistributedRPCInvocations.Client(_protocol));
+ }
+
+ public String getHost() {
+ return host;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ public void reconnectClient() throws TException {
+ if (client.get() == null) {
+ reconnect();
+ client.set(new DistributedRPCInvocations.Client(_protocol));
+ }
+ }
+
+ public boolean isConnected() {
+ return client.get() != null;
+ }
+
+ public void result(String id, String result) throws TException, AuthorizationException {
+ DistributedRPCInvocations.Client c = client.get();
+ try {
+ if (c == null) {
+ throw new TException("Client is not connected...");
+ }
+ c.result(id, result);
+ } catch(AuthorizationException aze) {
+ throw aze;
+ } catch(TException e) {
+ client.compareAndSet(c, null);
+ throw e;
+ }
+ }
+
+ public DRPCRequest fetchRequest(String func) throws TException, AuthorizationException {
+ DistributedRPCInvocations.Client c = client.get();
+ try {
+ if (c == null) {
+ throw new TException("Client is not connected...");
+ }
+ return c.fetchRequest(func);
+ } catch(AuthorizationException aze) {
+ throw aze;
+ } catch(TException e) {
+ client.compareAndSet(c, null);
+ throw e;
+ }
+ }
+
+ public void failRequest(String id) throws TException, AuthorizationException {
+ DistributedRPCInvocations.Client c = client.get();
+ try {
+ if (c == null) {
+ throw new TException("Client is not connected...");
+ }
+ c.failRequest(id);
+ } catch(AuthorizationException aze) {
+ throw aze;
+ } catch(TException e) {
+ client.compareAndSet(c, null);
+ throw e;
+ }
+ }
+
+ public DistributedRPCInvocations.Client getClient() {
+ return client.get();
+ }
+}
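
A minimal sketch of the request/response cycle this client supports, under stated assumptions: "drpc-host", port 3773 and the "exclaim" function are placeholders (the real values come from Config.DRPC_SERVERS and Config.DRPC_INVOCATIONS_PORT), and the storm config is loaded via Utils.readStormConfig().

    import java.util.Map;

    import backtype.storm.drpc.DRPCInvocationsClient;
    import backtype.storm.generated.DRPCRequest;
    import backtype.storm.utils.Utils;

    public class InvocationsClientExample {
        public static void main(String[] args) throws Exception {
            Map conf = Utils.readStormConfig();                    // ThriftClient needs the full storm conf
            DRPCInvocationsClient client = new DRPCInvocationsClient(conf, "drpc-host", 3773);
            DRPCRequest req = client.fetchRequest("exclaim");      // poll for work for the "exclaim" function
            if (req.get_request_id().length() > 0) {
                client.result(req.get_request_id(), req.get_func_args() + "!");
            }
            client.close();
        }
    }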
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/DRPCSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/DRPCSpout.java b/jstorm-core/src/main/java/backtype/storm/drpc/DRPCSpout.java
new file mode 100644
index 0000000..4ed24d4
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/DRPCSpout.java
@@ -0,0 +1,263 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
+import java.util.concurrent.SynchronousQueue;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.thrift.TException;
+import org.json.simple.JSONValue;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.jstorm.utils.NetWorkUtils;
+
+import backtype.storm.Config;
+import backtype.storm.ILocalDRPC;
+import backtype.storm.generated.AuthorizationException;
+import backtype.storm.generated.DRPCRequest;
+import backtype.storm.generated.DistributedRPCInvocations;
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichSpout;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Values;
+import backtype.storm.utils.ExtendedThreadPoolExecutor;
+import backtype.storm.utils.ServiceRegistry;
+import backtype.storm.utils.Utils;
+
+public class DRPCSpout extends BaseRichSpout {
+ //ANY CHANGE TO THIS CODE MUST BE SERIALIZABLE COMPATIBLE OR THERE WILL BE PROBLEMS
+ static final long serialVersionUID = 2387848310969237877L;
+
+ public static Logger LOG = LoggerFactory.getLogger(DRPCSpout.class);
+
+ SpoutOutputCollector _collector;
+ List<DRPCInvocationsClient> _clients = new ArrayList<DRPCInvocationsClient>();
+ transient LinkedList<Future<Void>> _futures = null;
+ transient ExecutorService _backround = null;
+ String _function;
+ String _local_drpc_id = null;
+
+ private static class DRPCMessageId {
+ String id;
+ int index;
+
+ public DRPCMessageId(String id, int index) {
+ this.id = id;
+ this.index = index;
+ }
+ }
+
+
+ public DRPCSpout(String function) {
+ _function = function;
+ }
+
+ public DRPCSpout(String function, ILocalDRPC drpc) {
+ _function = function;
+ _local_drpc_id = drpc.getServiceId();
+ }
+
+ private class Adder implements Callable<Void> {
+ private String server;
+ private int port;
+ private Map conf;
+
+ public Adder(String server, int port, Map conf) {
+ this.server = server;
+ this.port = port;
+ this.conf = conf;
+ }
+
+ @Override
+ public Void call() throws Exception {
+ DRPCInvocationsClient c = new DRPCInvocationsClient(conf, server, port);
+ synchronized (_clients) {
+ _clients.add(c);
+ }
+ return null;
+ }
+ }
+
+ private void reconnect(final DRPCInvocationsClient c) {
+ _futures.add(_backround.submit(new Callable<Void>() {
+ @Override
+ public Void call() throws Exception {
+ c.reconnectClient();
+ return null;
+ }
+ }));
+ }
+
+ private void checkFutures() {
+ Iterator<Future<Void>> i = _futures.iterator();
+ while (i.hasNext()) {
+ Future<Void> f = i.next();
+ if (f.isDone()) {
+ i.remove();
+ }
+ try {
+ f.get();
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+
+
+
+ @Override
+ public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
+ _collector = collector;
+ if(_local_drpc_id==null) {
+ _backround = new ExtendedThreadPoolExecutor(0, Integer.MAX_VALUE,
+ 60L, TimeUnit.SECONDS,
+ new SynchronousQueue<Runnable>());
+ _futures = new LinkedList<Future<Void>>();
+
+ int numTasks = context.getComponentTasks(context.getThisComponentId()).size();
+ int index = context.getThisTaskIndex();
+
+ int port = Utils.getInt(conf.get(Config.DRPC_INVOCATIONS_PORT));
+ List<String> servers = NetWorkUtils.host2Ip((List<String>) conf.get(Config.DRPC_SERVERS));
+
+ if(servers == null || servers.isEmpty()) {
+ throw new RuntimeException("No DRPC servers configured for topology");
+ }
+
+ if (numTasks < servers.size()) {
+ for (String s: servers) {
+ _futures.add(_backround.submit(new Adder(s, port, conf)));
+ }
+ } else {
+ int i = index % servers.size();
+ _futures.add(_backround.submit(new Adder(servers.get(i), port, conf)));
+ }
+ }
+
+ }
+
+ @Override
+ public void close() {
+ for(DRPCInvocationsClient client: _clients) {
+ client.close();
+ }
+ }
+
+ @Override
+ public void nextTuple() {
+ boolean gotRequest = false;
+ if(_local_drpc_id==null) {
+ int size = 0;
+ synchronized (_clients) {
+ size = _clients.size(); //This will only ever grow, so no need to worry about falling off the end
+ }
+ for(int i=0; i<size; i++) {
+ DRPCInvocationsClient client;
+ synchronized (_clients) {
+ client = _clients.get(i);
+ }
+ if (!client.isConnected()) {
+ continue;
+ }
+ try {
+ DRPCRequest req = client.fetchRequest(_function);
+ if(req.get_request_id().length() > 0) {
+ Map returnInfo = new HashMap();
+ returnInfo.put("id", req.get_request_id());
+ returnInfo.put("host", client.getHost());
+ returnInfo.put("port", client.getPort());
+ gotRequest = true;
+ _collector.emit(new Values(req.get_func_args(), JSONValue.toJSONString(returnInfo)), new DRPCMessageId(req.get_request_id(), i));
+ break;
+ }
+ } catch (AuthorizationException aze) {
+ reconnect(client);
+ LOG.error("Not authorized to fetch DRPC result from DRPC server", aze);
+ } catch (TException e) {
+ reconnect(client);
+ LOG.error("Failed to fetch DRPC result from DRPC server", e);
+ } catch (Exception e) {
+ LOG.error("Failed to fetch DRPC result from DRPC server", e);
+ }
+ }
+ checkFutures();
+ } else {
+ DistributedRPCInvocations.Iface drpc = (DistributedRPCInvocations.Iface) ServiceRegistry.getService(_local_drpc_id);
+ if(drpc!=null) { // can happen during shutdown of drpc while topology is still up
+ try {
+ DRPCRequest req = drpc.fetchRequest(_function);
+ if(req.get_request_id().length() > 0) {
+ Map returnInfo = new HashMap();
+ returnInfo.put("id", req.get_request_id());
+ returnInfo.put("host", _local_drpc_id);
+ returnInfo.put("port", 0);
+ gotRequest = true;
+ _collector.emit(new Values(req.get_func_args(), JSONValue.toJSONString(returnInfo)), new DRPCMessageId(req.get_request_id(), 0));
+ }
+ } catch (AuthorizationException aze) {
+ throw new RuntimeException(aze);
+ } catch (TException e) {
+ throw new RuntimeException(e);
+ }
+ }
+ }
+ if(!gotRequest) {
+ Utils.sleep(1);
+ }
+ }
+
+ @Override
+ public void ack(Object msgId) {
+ }
+
+ @Override
+ public void fail(Object msgId) {
+ DRPCMessageId did = (DRPCMessageId) msgId;
+ DistributedRPCInvocations.Iface client;
+
+ if(_local_drpc_id == null) {
+ client = _clients.get(did.index);
+ } else {
+ client = (DistributedRPCInvocations.Iface) ServiceRegistry.getService(_local_drpc_id);
+ }
+ try {
+ client.failRequest(did.id);
+ } catch (AuthorizationException aze) {
+ LOG.error("Not authorized to failREquest from DRPC server", aze);
+ } catch (TException e) {
+ LOG.error("Failed to fail request", e);
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("args", "return-info"));
+ }
+}
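
For orientation, a hedged sketch of wiring DRPCSpout into a topology by hand; in practice LinearDRPCTopologyBuilder does this wiring. MyExclaimBolt is a hypothetical bolt that emits ("result", "return-info"); ReturnResults belongs to the same backtype.storm.drpc package but is not shown in this hunk.

    import backtype.storm.drpc.DRPCSpout;
    import backtype.storm.drpc.ReturnResults;
    import backtype.storm.generated.StormTopology;
    import backtype.storm.topology.TopologyBuilder;

    public class ManualDrpcTopology {
        public static StormTopology build() {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("drpc", new DRPCSpout("exclaim"));   // emits ("args", "return-info")
            builder.setBolt("exclaim", new MyExclaimBolt(), 3)    // hypothetical bolt producing ("result", "return-info")
                   .shuffleGrouping("drpc");
            builder.setBolt("return", new ReturnResults(), 3)     // routes the result back to the DRPC server
                   .shuffleGrouping("exclaim");
            return builder.createTopology();
        }
    }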
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/JoinResult.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/JoinResult.java b/jstorm-core/src/main/java/backtype/storm/drpc/JoinResult.java
new file mode 100755
index 0000000..b74b97e
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/JoinResult.java
@@ -0,0 +1,75 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class JoinResult extends BaseRichBolt {
+ public static Logger LOG = LoggerFactory.getLogger(JoinResult.class);
+
+ String returnComponent;
+ Map<Object, Tuple> returns = new HashMap<Object, Tuple>();
+ Map<Object, Tuple> results = new HashMap<Object, Tuple>();
+ OutputCollector _collector;
+
+ public JoinResult(String returnComponent) {
+ this.returnComponent = returnComponent;
+ }
+
+ public void prepare(Map map, TopologyContext context, OutputCollector collector) {
+ _collector = collector;
+ }
+
+ public void execute(Tuple tuple) {
+ Object requestId = tuple.getValue(0);
+ if(tuple.getSourceComponent().equals(returnComponent)) {
+ returns.put(requestId, tuple);
+ } else {
+ results.put(requestId, tuple);
+ }
+
+ if(returns.containsKey(requestId) && results.containsKey(requestId)) {
+ Tuple result = results.remove(requestId);
+ Tuple returner = returns.remove(requestId);
+ LOG.debug(result.getValue(1).toString());
+ List<Tuple> anchors = new ArrayList<Tuple>();
+ anchors.add(result);
+ anchors.add(returner);
+ _collector.emit(anchors, new Values(""+result.getValue(1), returner.getValue(1)));
+ _collector.ack(result);
+ _collector.ack(returner);
+ }
+ }
+
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("result", "return-info"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/KeyedFairBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/KeyedFairBolt.java b/jstorm-core/src/main/java/backtype/storm/drpc/KeyedFairBolt.java
new file mode 100755
index 0000000..113163d
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/KeyedFairBolt.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.BasicBoltExecutor;
+import backtype.storm.topology.IBasicBolt;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.utils.KeyedRoundRobinQueue;
+import java.util.HashMap;
+import java.util.Map;
+
+
+public class KeyedFairBolt implements IRichBolt, FinishedCallback {
+ IRichBolt _delegate;
+ KeyedRoundRobinQueue<Tuple> _rrQueue;
+ Thread _executor;
+ FinishedCallback _callback;
+
+ public KeyedFairBolt(IRichBolt delegate) {
+ _delegate = delegate;
+ }
+
+ public KeyedFairBolt(IBasicBolt delegate) {
+ this(new BasicBoltExecutor(delegate));
+ }
+
+
+ public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+ if(_delegate instanceof FinishedCallback) {
+ _callback = (FinishedCallback) _delegate;
+ }
+ _delegate.prepare(stormConf, context, collector);
+ _rrQueue = new KeyedRoundRobinQueue<Tuple>();
+ _executor = new Thread(new Runnable() {
+ public void run() {
+ try {
+ while(true) {
+ _delegate.execute(_rrQueue.take());
+ }
+ } catch (InterruptedException e) {
+
+ }
+ }
+ });
+ _executor.setDaemon(true);
+ _executor.start();
+ }
+
+ public void execute(Tuple input) {
+ Object key = input.getValue(0);
+ _rrQueue.add(key, input);
+ }
+
+ public void cleanup() {
+ _executor.interrupt();
+ _delegate.cleanup();
+ }
+
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ _delegate.declareOutputFields(declarer);
+ }
+
+ public void finishedId(Object id) {
+ if(_callback!=null) {
+ _callback.finishedId(id);
+ }
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ return new HashMap<String, Object>();
+ }
+}
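
A short usage sketch, assuming a TopologyBuilder named builder and a hypothetical IBasicBolt MyQueryBolt: KeyedFairBolt wraps the delegate so queued tuples are drained round-robin by the key in field 0, preventing one large request from starving the others. The "prepare-request" component and "request-id" field are placeholders.

    builder.setBolt("query", new KeyedFairBolt(new MyQueryBolt()), 8)
           .fieldsGrouping("prepare-request", new Fields("request-id"));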
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java b/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java
new file mode 100755
index 0000000..d03075e
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java
@@ -0,0 +1,52 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import backtype.storm.grouping.CustomStreamGrouping;
+import backtype.storm.topology.ComponentConfigurationDeclarer;
+import backtype.storm.tuple.Fields;
+
+public interface LinearDRPCInputDeclarer extends ComponentConfigurationDeclarer<LinearDRPCInputDeclarer> {
+ public LinearDRPCInputDeclarer fieldsGrouping(Fields fields);
+ public LinearDRPCInputDeclarer fieldsGrouping(String streamId, Fields fields);
+
+ public LinearDRPCInputDeclarer globalGrouping();
+ public LinearDRPCInputDeclarer globalGrouping(String streamId);
+
+ public LinearDRPCInputDeclarer shuffleGrouping();
+ public LinearDRPCInputDeclarer shuffleGrouping(String streamId);
+
+ public LinearDRPCInputDeclarer localOrShuffleGrouping();
+ public LinearDRPCInputDeclarer localOrShuffleGrouping(String streamId);
+
+ public LinearDRPCInputDeclarer noneGrouping();
+ public LinearDRPCInputDeclarer noneGrouping(String streamId);
+
+ public LinearDRPCInputDeclarer allGrouping();
+ public LinearDRPCInputDeclarer allGrouping(String streamId);
+
+ public LinearDRPCInputDeclarer directGrouping();
+ public LinearDRPCInputDeclarer directGrouping(String streamId);
+
+ public LinearDRPCInputDeclarer partialKeyGrouping(Fields fields);
+ public LinearDRPCInputDeclarer partialKeyGrouping(String streamId, Fields fields);
+
+ public LinearDRPCInputDeclarer customGrouping(CustomStreamGrouping grouping);
+ public LinearDRPCInputDeclarer customGrouping(String streamId, CustomStreamGrouping grouping);
+
+}
[53/60] [abbrv] storm git commit: removed jstorm-on-yarn subdirectory
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/generated/StormMaster.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/generated/StormMaster.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/generated/StormMaster.java
deleted file mode 100644
index b6ca97c..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/generated/StormMaster.java
+++ /dev/null
@@ -1,5113 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package com.alibaba.jstorm.yarn.generated;
-
-import java.util.BitSet;
-import java.util.Collections;
-import java.util.EnumMap;
-import java.util.EnumSet;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class StormMaster {
-
- public interface Iface {
-
- public String getStormConf() throws org.apache.thrift7.TException;
-
- public void setStormConf(String storm_conf) throws org.apache.thrift7.TException;
-
- public void addSupervisors(int number) throws org.apache.thrift7.TException;
-
- public void startNimbus() throws org.apache.thrift7.TException;
-
- public void stopNimbus() throws org.apache.thrift7.TException;
-
- public void startUI() throws org.apache.thrift7.TException;
-
- public void stopUI() throws org.apache.thrift7.TException;
-
- public void startSupervisors() throws org.apache.thrift7.TException;
-
- public void stopSupervisors() throws org.apache.thrift7.TException;
-
- public void shutdown() throws org.apache.thrift7.TException;
-
- }
-
- public interface AsyncIface {
-
- public void getStormConf(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getStormConf_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void setStormConf(String storm_conf, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.setStormConf_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void addSupervisors(int number, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.addSupervisors_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void startNimbus(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.startNimbus_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void stopNimbus(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.stopNimbus_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void startUI(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.startUI_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void stopUI(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.stopUI_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void startSupervisors(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.startSupervisors_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void stopSupervisors(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.stopSupervisors_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void shutdown(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.shutdown_call> resultHandler) throws org.apache.thrift7.TException;
-
- }
-
- public static class Client extends org.apache.thrift7.TServiceClient implements Iface {
- public static class Factory implements org.apache.thrift7.TServiceClientFactory<Client> {
- public Factory() {}
- public Client getClient(org.apache.thrift7.protocol.TProtocol prot) {
- return new Client(prot);
- }
- public Client getClient(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- return new Client(iprot, oprot);
- }
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol prot)
- {
- super(prot, prot);
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- super(iprot, oprot);
- }
-
- public String getStormConf() throws org.apache.thrift7.TException
- {
- send_getStormConf();
- return recv_getStormConf();
- }
-
- public void send_getStormConf() throws org.apache.thrift7.TException
- {
- getStormConf_args args = new getStormConf_args();
- sendBase("getStormConf", args);
- }
-
- public String recv_getStormConf() throws org.apache.thrift7.TException
- {
- getStormConf_result result = new getStormConf_result();
- receiveBase(result, "getStormConf");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getStormConf failed: unknown result");
- }
-
- public void setStormConf(String storm_conf) throws org.apache.thrift7.TException
- {
- send_setStormConf(storm_conf);
- recv_setStormConf();
- }
-
- public void send_setStormConf(String storm_conf) throws org.apache.thrift7.TException
- {
- setStormConf_args args = new setStormConf_args();
- args.set_storm_conf(storm_conf);
- sendBase("setStormConf", args);
- }
-
- public void recv_setStormConf() throws org.apache.thrift7.TException
- {
- setStormConf_result result = new setStormConf_result();
- receiveBase(result, "setStormConf");
- return;
- }
-
- public void addSupervisors(int number) throws org.apache.thrift7.TException
- {
- send_addSupervisors(number);
- recv_addSupervisors();
- }
-
- public void send_addSupervisors(int number) throws org.apache.thrift7.TException
- {
- addSupervisors_args args = new addSupervisors_args();
- args.set_number(number);
- sendBase("addSupervisors", args);
- }
-
- public void recv_addSupervisors() throws org.apache.thrift7.TException
- {
- addSupervisors_result result = new addSupervisors_result();
- receiveBase(result, "addSupervisors");
- return;
- }
-
- public void startNimbus() throws org.apache.thrift7.TException
- {
- send_startNimbus();
- recv_startNimbus();
- }
-
- public void send_startNimbus() throws org.apache.thrift7.TException
- {
- startNimbus_args args = new startNimbus_args();
- sendBase("startNimbus", args);
- }
-
- public void recv_startNimbus() throws org.apache.thrift7.TException
- {
- startNimbus_result result = new startNimbus_result();
- receiveBase(result, "startNimbus");
- return;
- }
-
- public void stopNimbus() throws org.apache.thrift7.TException
- {
- send_stopNimbus();
- recv_stopNimbus();
- }
-
- public void send_stopNimbus() throws org.apache.thrift7.TException
- {
- stopNimbus_args args = new stopNimbus_args();
- sendBase("stopNimbus", args);
- }
-
- public void recv_stopNimbus() throws org.apache.thrift7.TException
- {
- stopNimbus_result result = new stopNimbus_result();
- receiveBase(result, "stopNimbus");
- return;
- }
-
- public void startUI() throws org.apache.thrift7.TException
- {
- send_startUI();
- recv_startUI();
- }
-
- public void send_startUI() throws org.apache.thrift7.TException
- {
- startUI_args args = new startUI_args();
- sendBase("startUI", args);
- }
-
- public void recv_startUI() throws org.apache.thrift7.TException
- {
- startUI_result result = new startUI_result();
- receiveBase(result, "startUI");
- return;
- }
-
- public void stopUI() throws org.apache.thrift7.TException
- {
- send_stopUI();
- recv_stopUI();
- }
-
- public void send_stopUI() throws org.apache.thrift7.TException
- {
- stopUI_args args = new stopUI_args();
- sendBase("stopUI", args);
- }
-
- public void recv_stopUI() throws org.apache.thrift7.TException
- {
- stopUI_result result = new stopUI_result();
- receiveBase(result, "stopUI");
- return;
- }
-
- public void startSupervisors() throws org.apache.thrift7.TException
- {
- send_startSupervisors();
- recv_startSupervisors();
- }
-
- public void send_startSupervisors() throws org.apache.thrift7.TException
- {
- startSupervisors_args args = new startSupervisors_args();
- sendBase("startSupervisors", args);
- }
-
- public void recv_startSupervisors() throws org.apache.thrift7.TException
- {
- startSupervisors_result result = new startSupervisors_result();
- receiveBase(result, "startSupervisors");
- return;
- }
-
- public void stopSupervisors() throws org.apache.thrift7.TException
- {
- send_stopSupervisors();
- recv_stopSupervisors();
- }
-
- public void send_stopSupervisors() throws org.apache.thrift7.TException
- {
- stopSupervisors_args args = new stopSupervisors_args();
- sendBase("stopSupervisors", args);
- }
-
- public void recv_stopSupervisors() throws org.apache.thrift7.TException
- {
- stopSupervisors_result result = new stopSupervisors_result();
- receiveBase(result, "stopSupervisors");
- return;
- }
-
- public void shutdown() throws org.apache.thrift7.TException
- {
- send_shutdown();
- recv_shutdown();
- }
-
- public void send_shutdown() throws org.apache.thrift7.TException
- {
- shutdown_args args = new shutdown_args();
- sendBase("shutdown", args);
- }
-
- public void recv_shutdown() throws org.apache.thrift7.TException
- {
- shutdown_result result = new shutdown_result();
- receiveBase(result, "shutdown");
- return;
- }
-
- }
- public static class AsyncClient extends org.apache.thrift7.async.TAsyncClient implements AsyncIface {
- public static class Factory implements org.apache.thrift7.async.TAsyncClientFactory<AsyncClient> {
- private org.apache.thrift7.async.TAsyncClientManager clientManager;
- private org.apache.thrift7.protocol.TProtocolFactory protocolFactory;
- public Factory(org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.protocol.TProtocolFactory protocolFactory) {
- this.clientManager = clientManager;
- this.protocolFactory = protocolFactory;
- }
- public AsyncClient getAsyncClient(org.apache.thrift7.transport.TNonblockingTransport transport) {
- return new AsyncClient(protocolFactory, clientManager, transport);
- }
- }
-
- public AsyncClient(org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.transport.TNonblockingTransport transport) {
- super(protocolFactory, clientManager, transport);
- }
-
- public void getStormConf(org.apache.thrift7.async.AsyncMethodCallback<getStormConf_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getStormConf_call method_call = new getStormConf_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getStormConf_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public getStormConf_call(org.apache.thrift7.async.AsyncMethodCallback<getStormConf_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getStormConf", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getStormConf_args args = new getStormConf_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public String getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getStormConf();
- }
- }
-
- public void setStormConf(String storm_conf, org.apache.thrift7.async.AsyncMethodCallback<setStormConf_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- setStormConf_call method_call = new setStormConf_call(storm_conf, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class setStormConf_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String storm_conf;
- public setStormConf_call(String storm_conf, org.apache.thrift7.async.AsyncMethodCallback<setStormConf_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.storm_conf = storm_conf;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("setStormConf", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- setStormConf_args args = new setStormConf_args();
- args.set_storm_conf(storm_conf);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_setStormConf();
- }
- }
-
- public void addSupervisors(int number, org.apache.thrift7.async.AsyncMethodCallback<addSupervisors_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- addSupervisors_call method_call = new addSupervisors_call(number, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class addSupervisors_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private int number;
- public addSupervisors_call(int number, org.apache.thrift7.async.AsyncMethodCallback<addSupervisors_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.number = number;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("addSupervisors", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- addSupervisors_args args = new addSupervisors_args();
- args.set_number(number);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_addSupervisors();
- }
- }
-
- public void startNimbus(org.apache.thrift7.async.AsyncMethodCallback<startNimbus_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- startNimbus_call method_call = new startNimbus_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class startNimbus_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public startNimbus_call(org.apache.thrift7.async.AsyncMethodCallback<startNimbus_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("startNimbus", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- startNimbus_args args = new startNimbus_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_startNimbus();
- }
- }
-
- public void stopNimbus(org.apache.thrift7.async.AsyncMethodCallback<stopNimbus_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- stopNimbus_call method_call = new stopNimbus_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class stopNimbus_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public stopNimbus_call(org.apache.thrift7.async.AsyncMethodCallback<stopNimbus_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("stopNimbus", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- stopNimbus_args args = new stopNimbus_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_stopNimbus();
- }
- }
-
- public void startUI(org.apache.thrift7.async.AsyncMethodCallback<startUI_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- startUI_call method_call = new startUI_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class startUI_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public startUI_call(org.apache.thrift7.async.AsyncMethodCallback<startUI_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("startUI", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- startUI_args args = new startUI_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_startUI();
- }
- }
-
- public void stopUI(org.apache.thrift7.async.AsyncMethodCallback<stopUI_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- stopUI_call method_call = new stopUI_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class stopUI_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public stopUI_call(org.apache.thrift7.async.AsyncMethodCallback<stopUI_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("stopUI", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- stopUI_args args = new stopUI_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_stopUI();
- }
- }
-
- public void startSupervisors(org.apache.thrift7.async.AsyncMethodCallback<startSupervisors_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- startSupervisors_call method_call = new startSupervisors_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class startSupervisors_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public startSupervisors_call(org.apache.thrift7.async.AsyncMethodCallback<startSupervisors_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("startSupervisors", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- startSupervisors_args args = new startSupervisors_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_startSupervisors();
- }
- }
-
- public void stopSupervisors(org.apache.thrift7.async.AsyncMethodCallback<stopSupervisors_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- stopSupervisors_call method_call = new stopSupervisors_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class stopSupervisors_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public stopSupervisors_call(org.apache.thrift7.async.AsyncMethodCallback<stopSupervisors_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("stopSupervisors", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- stopSupervisors_args args = new stopSupervisors_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_stopSupervisors();
- }
- }
-
- public void shutdown(org.apache.thrift7.async.AsyncMethodCallback<shutdown_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- shutdown_call method_call = new shutdown_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class shutdown_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public shutdown_call(org.apache.thrift7.async.AsyncMethodCallback<shutdown_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("shutdown", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- shutdown_args args = new shutdown_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_shutdown();
- }
- }
-
- }
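
Each AsyncClient method above wraps its arguments in a *_call object, hands it to the shared TAsyncClientManager, and returns immediately; when the response frame arrives, the caller's AsyncMethodCallback fires and getResult() re-parses the buffered frame through a throwaway synchronous Client. A sketch of one non-blocking call, with ClusterManager again a hypothetical name for the generated service class and the standard Thrift async classes assumed to exist under the shaded org.apache.thrift7 prefix:

    import org.apache.thrift7.async.AsyncMethodCallback;
    import org.apache.thrift7.async.TAsyncClientManager;
    import org.apache.thrift7.protocol.TBinaryProtocol;
    import org.apache.thrift7.transport.TNonblockingSocket;

    public class AsyncClientSketch {
        public static void main(String[] args) throws Exception {
            // One manager (selector thread) can be shared by many async clients.
            TAsyncClientManager manager = new TAsyncClientManager();
            TNonblockingSocket socket = new TNonblockingSocket("localhost", 9090);
            ClusterManager.AsyncClient client =
                new ClusterManager.AsyncClient(new TBinaryProtocol.Factory(), manager, socket);

            client.getStormConf(new AsyncMethodCallback<ClusterManager.AsyncClient.getStormConf_call>() {
                public void onComplete(ClusterManager.AsyncClient.getStormConf_call response) {
                    try {
                        System.out.println("conf: " + response.getResult());
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }

                public void onError(Exception exception) {
                    exception.printStackTrace();
                }
            });
            // The call returns immediately; the callback runs on the manager's selector thread.
        }
    }
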
-
- public static class Processor<I extends Iface> extends org.apache.thrift7.TBaseProcessor implements org.apache.thrift7.TProcessor {
- private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
- public Processor(I iface) {
- super(iface, getProcessMap(new HashMap<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>>()));
- }
-
- protected Processor(I iface, Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- super(iface, getProcessMap(processMap));
- }
-
- private static <I extends Iface> Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> getProcessMap(Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- processMap.put("getStormConf", new getStormConf());
- processMap.put("setStormConf", new setStormConf());
- processMap.put("addSupervisors", new addSupervisors());
- processMap.put("startNimbus", new startNimbus());
- processMap.put("stopNimbus", new stopNimbus());
- processMap.put("startUI", new startUI());
- processMap.put("stopUI", new stopUI());
- processMap.put("startSupervisors", new startSupervisors());
- processMap.put("stopSupervisors", new stopSupervisors());
- processMap.put("shutdown", new shutdown());
- return processMap;
- }
-
- private static class getStormConf<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, getStormConf_args> {
- public getStormConf() {
- super("getStormConf");
- }
-
- protected getStormConf_args getEmptyArgsInstance() {
- return new getStormConf_args();
- }
-
- protected getStormConf_result getResult(I iface, getStormConf_args args) throws org.apache.thrift7.TException {
- getStormConf_result result = new getStormConf_result();
- result.success = iface.getStormConf();
- return result;
- }
- }
-
- private static class setStormConf<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, setStormConf_args> {
- public setStormConf() {
- super("setStormConf");
- }
-
- protected setStormConf_args getEmptyArgsInstance() {
- return new setStormConf_args();
- }
-
- protected setStormConf_result getResult(I iface, setStormConf_args args) throws org.apache.thrift7.TException {
- setStormConf_result result = new setStormConf_result();
- iface.setStormConf(args.storm_conf);
- return result;
- }
- }
-
- private static class addSupervisors<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, addSupervisors_args> {
- public addSupervisors() {
- super("addSupervisors");
- }
-
- protected addSupervisors_args getEmptyArgsInstance() {
- return new addSupervisors_args();
- }
-
- protected addSupervisors_result getResult(I iface, addSupervisors_args args) throws org.apache.thrift7.TException {
- addSupervisors_result result = new addSupervisors_result();
- iface.addSupervisors(args.number);
- return result;
- }
- }
-
- private static class startNimbus<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, startNimbus_args> {
- public startNimbus() {
- super("startNimbus");
- }
-
- protected startNimbus_args getEmptyArgsInstance() {
- return new startNimbus_args();
- }
-
- protected startNimbus_result getResult(I iface, startNimbus_args args) throws org.apache.thrift7.TException {
- startNimbus_result result = new startNimbus_result();
- iface.startNimbus();
- return result;
- }
- }
-
- private static class stopNimbus<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, stopNimbus_args> {
- public stopNimbus() {
- super("stopNimbus");
- }
-
- protected stopNimbus_args getEmptyArgsInstance() {
- return new stopNimbus_args();
- }
-
- protected stopNimbus_result getResult(I iface, stopNimbus_args args) throws org.apache.thrift7.TException {
- stopNimbus_result result = new stopNimbus_result();
- iface.stopNimbus();
- return result;
- }
- }
-
- private static class startUI<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, startUI_args> {
- public startUI() {
- super("startUI");
- }
-
- protected startUI_args getEmptyArgsInstance() {
- return new startUI_args();
- }
-
- protected startUI_result getResult(I iface, startUI_args args) throws org.apache.thrift7.TException {
- startUI_result result = new startUI_result();
- iface.startUI();
- return result;
- }
- }
-
- private static class stopUI<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, stopUI_args> {
- public stopUI() {
- super("stopUI");
- }
-
- protected stopUI_args getEmptyArgsInstance() {
- return new stopUI_args();
- }
-
- protected stopUI_result getResult(I iface, stopUI_args args) throws org.apache.thrift7.TException {
- stopUI_result result = new stopUI_result();
- iface.stopUI();
- return result;
- }
- }
-
- private static class startSupervisors<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, startSupervisors_args> {
- public startSupervisors() {
- super("startSupervisors");
- }
-
- protected startSupervisors_args getEmptyArgsInstance() {
- return new startSupervisors_args();
- }
-
- protected startSupervisors_result getResult(I iface, startSupervisors_args args) throws org.apache.thrift7.TException {
- startSupervisors_result result = new startSupervisors_result();
- iface.startSupervisors();
- return result;
- }
- }
-
- private static class stopSupervisors<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, stopSupervisors_args> {
- public stopSupervisors() {
- super("stopSupervisors");
- }
-
- protected stopSupervisors_args getEmptyArgsInstance() {
- return new stopSupervisors_args();
- }
-
- protected stopSupervisors_result getResult(I iface, stopSupervisors_args args) throws org.apache.thrift7.TException {
- stopSupervisors_result result = new stopSupervisors_result();
- iface.stopSupervisors();
- return result;
- }
- }
-
- private static class shutdown<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, shutdown_args> {
- public shutdown() {
- super("shutdown");
- }
-
- protected shutdown_args getEmptyArgsInstance() {
- return new shutdown_args();
- }
-
- protected shutdown_result getResult(I iface, shutdown_args args) throws org.apache.thrift7.TException {
- shutdown_result result = new shutdown_result();
- iface.shutdown();
- return result;
- }
- }
-
- }
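
The Processor above is the server side: getProcessMap registers one ProcessFunction per method, and each ProcessFunction deserializes the *_args struct, invokes the corresponding method on an Iface implementation, and serializes the *_result. A minimal handler and server wiring might look like the sketch below (the Iface itself is not shown in this excerpt; ClusterManager remains a hypothetical name, and the Thrift 0.7-style server classes are assumed to exist under the shaded org.apache.thrift7 prefix):

    import org.apache.thrift7.server.TServer;
    import org.apache.thrift7.server.TThreadPoolServer;
    import org.apache.thrift7.transport.TServerSocket;

    public class ProcessorSketch {
        // Minimal handler implementing the implied service interface.
        static class Handler implements ClusterManager.Iface {
            private volatile String conf = "{}";
            public String getStormConf() { return conf; }
            public void setStormConf(String storm_conf) { this.conf = storm_conf; }
            public void addSupervisors(int number) { /* launch `number` extra supervisors */ }
            public void startNimbus() { }
            public void stopNimbus() { }
            public void startUI() { }
            public void stopUI() { }
            public void startSupervisors() { }
            public void stopSupervisors() { }
            public void shutdown() { }
        }

        public static void main(String[] args) throws Exception {
            TServerSocket serverTransport = new TServerSocket(9090);
            ClusterManager.Processor<ClusterManager.Iface> processor =
                new ClusterManager.Processor<ClusterManager.Iface>(new Handler());
            TServer server = new TThreadPoolServer(
                new TThreadPoolServer.Args(serverTransport).processor(processor));
            server.serve(); // blocks; each incoming call is dispatched via the process map above
        }
    }
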
-
- public static class getStormConf_args implements org.apache.thrift7.TBase<getStormConf_args, getStormConf_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("getStormConf_args");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(getStormConf_args.class, metaDataMap);
- }
-
- public getStormConf_args() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public getStormConf_args(getStormConf_args other) {
- }
-
- public getStormConf_args deepCopy() {
- return new getStormConf_args(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof getStormConf_args)
- return this.equals((getStormConf_args)that);
- return false;
- }
-
- public boolean equals(getStormConf_args that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(getStormConf_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- getStormConf_args typedOther = (getStormConf_args)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("getStormConf_args(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class getStormConf_result implements org.apache.thrift7.TBase<getStormConf_result, getStormConf_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("getStormConf_result");
-
- private static final org.apache.thrift7.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift7.protocol.TField("success", org.apache.thrift7.protocol.TType.STRING, (short)0);
-
- private String success; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SUCCESS((short)0, "success");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 0: // SUCCESS
- return SUCCESS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SUCCESS, new org.apache.thrift7.meta_data.FieldMetaData("success", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(getStormConf_result.class, metaDataMap);
- }
-
- public getStormConf_result() {
- }
-
- public getStormConf_result(
- String success)
- {
- this();
- this.success = success;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public getStormConf_result(getStormConf_result other) {
- if (other.is_set_success()) {
- this.success = other.success;
- }
- }
-
- public getStormConf_result deepCopy() {
- return new getStormConf_result(this);
- }
-
- @Override
- public void clear() {
- this.success = null;
- }
-
- public String get_success() {
- return this.success;
- }
-
- public void set_success(String success) {
- this.success = success;
- }
-
- public void unset_success() {
- this.success = null;
- }
-
- /** Returns true if field success is set (has been assigned a value) and false otherwise */
- public boolean is_set_success() {
- return this.success != null;
- }
-
- public void set_success_isSet(boolean value) {
- if (!value) {
- this.success = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SUCCESS:
- if (value == null) {
- unset_success();
- } else {
- set_success((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SUCCESS:
- return get_success();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SUCCESS:
- return is_set_success();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof getStormConf_result)
- return this.equals((getStormConf_result)that);
- return false;
- }
-
- public boolean equals(getStormConf_result that) {
- if (that == null)
- return false;
-
- boolean this_present_success = true && this.is_set_success();
- boolean that_present_success = true && that.is_set_success();
- if (this_present_success || that_present_success) {
- if (!(this_present_success && that_present_success))
- return false;
- if (!this.success.equals(that.success))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_success = true && (is_set_success());
- builder.append(present_success);
- if (present_success)
- builder.append(success);
-
- return builder.toHashCode();
- }
-
- public int compareTo(getStormConf_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- getStormConf_result typedOther = (getStormConf_result)other;
-
- lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_success()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.success, typedOther.success);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 0: // SUCCESS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.success = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- if (this.is_set_success()) {
- oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
- oprot.writeString(this.success);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("getStormConf_result(");
- boolean first = true;
-
- sb.append("success:");
- if (this.success == null) {
- sb.append("null");
- } else {
- sb.append(this.success);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
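
Every RPC method gets a *_args/*_result struct pair like the two above, each with its own _Fields enum, metaDataMap, and hand-rolled read/write/equals/hashCode/compareTo. These structs can also be serialized on their own; a small round trip through an in-memory transport illustrates the read/write pattern (TMemoryBuffer and TBinaryProtocol are assumed to be available under the shaded org.apache.thrift7 prefix, and the struct is qualified with the hypothetical outer class name ClusterManager):

    import org.apache.thrift7.protocol.TBinaryProtocol;
    import org.apache.thrift7.transport.TMemoryBuffer;

    public class StructRoundTripSketch {
        public static void main(String[] args) throws Exception {
            // Serialize a result struct into an in-memory buffer...
            ClusterManager.getStormConf_result out =
                new ClusterManager.getStormConf_result("{\"nimbus.host\": \"localhost\"}");
            TMemoryBuffer buffer = new TMemoryBuffer(256);
            out.write(new TBinaryProtocol(buffer));

            // ...then read it back into a fresh instance.
            ClusterManager.getStormConf_result in = new ClusterManager.getStormConf_result();
            in.read(new TBinaryProtocol(buffer));

            System.out.println(in.get_success());  // {"nimbus.host": "localhost"}
            System.out.println(out.equals(in));    // true: equals() only compares the success field
        }
    }
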
-
- public static class setStormConf_args implements org.apache.thrift7.TBase<setStormConf_args, setStormConf_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("setStormConf_args");
-
- private static final org.apache.thrift7.protocol.TField STORM_CONF_FIELD_DESC = new org.apache.thrift7.protocol.TField("storm_conf", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String storm_conf; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- STORM_CONF((short)1, "storm_conf");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // STORM_CONF
- return STORM_CONF;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.STORM_CONF, new org.apache.thrift7.meta_data.FieldMetaData("storm_conf", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(setStormConf_args.class, metaDataMap);
- }
-
- public setStormConf_args() {
- }
-
- public setStormConf_args(
- String storm_conf)
- {
- this();
- this.storm_conf = storm_conf;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public setStormConf_args(setStormConf_args other) {
- if (other.is_set_storm_conf()) {
- this.storm_conf = other.storm_conf;
- }
- }
-
- public setStormConf_args deepCopy() {
- return new setStormConf_args(this);
- }
-
- @Override
- public void clear() {
- this.storm_conf = null;
- }
-
- public String get_storm_conf() {
- return this.storm_conf;
- }
-
- public void set_storm_conf(String storm_conf) {
- this.storm_conf = storm_conf;
- }
-
- public void unset_storm_conf() {
- this.storm_conf = null;
- }
-
- /** Returns true if field storm_conf is set (has been assigned a value) and false otherwise */
- public boolean is_set_storm_conf() {
- return this.storm_conf != null;
- }
-
- public void set_storm_conf_isSet(boolean value) {
- if (!value) {
- this.storm_conf = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case STORM_CONF:
- if (value == null) {
- unset_storm_conf();
- } else {
- set_storm_conf((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case STORM_CONF:
- return get_storm_conf();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case STORM_CONF:
- return is_set_storm_conf();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof setStormConf_args)
- return this.equals((setStormConf_args)that);
- return false;
- }
-
- public boolean equals(setStormConf_args that) {
- if (that == null)
- return false;
-
- boolean this_present_storm_conf = true && this.is_set_storm_conf();
- boolean that_present_storm_conf = true && that.is_set_storm_conf();
- if (this_present_storm_conf || that_present_storm_conf) {
- if (!(this_present_storm_conf && that_present_storm_conf))
- return false;
- if (!this.storm_conf.equals(that.storm_conf))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_storm_conf = true && (is_set_storm_conf());
- builder.append(present_storm_conf);
- if (present_storm_conf)
- builder.append(storm_conf);
-
- return builder.toHashCode();
- }
-
- public int compareTo(setStormConf_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- setStormConf_args typedOther = (setStormConf_args)other;
-
- lastComparison = Boolean.valueOf(is_set_storm_conf()).compareTo(typedOther.is_set_storm_conf());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_storm_conf()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.storm_conf, typedOther.storm_conf);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // STORM_CONF
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.storm_conf = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.storm_conf != null) {
- oprot.writeFieldBegin(STORM_CONF_FIELD_DESC);
- oprot.writeString(this.storm_conf);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("setStormConf_args(");
- boolean first = true;
-
- sb.append("storm_conf:");
- if (this.storm_conf == null) {
- sb.append("null");
- } else {
- sb.append(this.storm_conf);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class setStormConf_result implements org.apache.thrift7.TBase<setStormConf_result, setStormConf_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("setStormConf_result");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(setStormConf_result.class, metaDataMap);
- }
-
- public setStormConf_result() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public setStormConf_result(setStormConf_result other) {
- }
-
- public setStormConf_result deepCopy() {
- return new setStormConf_result(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof setStormConf_result)
- return this.equals((setStormConf_result)that);
- return false;
- }
-
- public boolean equals(setStormConf_result that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(setStormConf_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- setStormConf_result typedOther = (setStormConf_result)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("setStormConf_result(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class addSupervisors_args implements org.apache.thrift7.TBase<addSupervisors_args, addSupervisors_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("addSupervisors_args");
-
- private static final org.apache.thrift7.protocol.TField NUMBER_FIELD_DESC = new org.apache.thrift7.protocol.TField("number", org.apache.thrift7.protocol.TType.I32, (short)1);
-
- private int number; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- NUMBER((short)1, "number");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // NUMBER
- return NUMBER;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __NUMBER_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.NUMBER, new org.apache.thrift7.meta_data.FieldMetaData("number", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(addSupervisors_args.class, metaDataMap);
- }
-
- public addSupervisors_args() {
- }
-
- public addSupervisors_args(
- int number)
- {
- this();
- this.number = number;
- set_number_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public addSupervisors_args(addSupervisors_args other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.number = other.number;
- }
-
- public addSupervisors_args deepCopy() {
- return new addSupervisors_args(this);
- }
-
- @Override
- public void clear() {
- set_number_isSet(false);
- this.number = 0;
- }
-
- public int get_number() {
- return this.number;
- }
-
- public void set_number(int number) {
- this.number = number;
- set_number_isSet(true);
- }
-
- public void unset_number() {
- __isset_bit_vector.clear(__NUMBER_ISSET_ID);
- }
-
- /** Returns true if field number is set (has been assigned a value) and false otherwise */
- public boolean is_set_number() {
- return __isset_bit_vector.get(__NUMBER_ISSET_ID);
- }
-
- public void set_number_isSet(boolean value) {
- __isset_bit_vector.set(__NUMBER_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case NUMBER:
- if (value == null) {
- unset_number();
- } else {
- set_number((Integer)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case NUMBER:
- return Integer.valueOf(get_number());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case NUMBER:
- return is_set_number();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof addSupervisors_args)
- return this.equals((addSupervisors_args)that);
- return false;
- }
-
- public boolean equals(addSupervisors_args that) {
- if (that == null)
- return false;
-
- boolean this_present_number = true;
- boolean that_present_number = true;
- if (this_present_number || that_present_number) {
- if (!(this_present_number && that_present_number))
- return false;
- if (this.number != that.number)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_number = true;
- builder.append(present_number);
- if (present_number)
- builder.append(number);
-
- return builder.toHashCode();
- }
-
- public int compareTo(addSupervisors_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- addSupervisors_args typedOther = (addSupervisors_args)other;
-
- lastComparison = Boolean.valueOf(is_set_number()).compareTo(typedOther.is_set_number());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_number()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.number, typedOther.number);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // NUMBER
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.number = iprot.readI32();
- set_number_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldBegin(NUMBER_FIELD_DESC);
- oprot.writeI32(this.number);
- oprot.writeFieldEnd();
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("addSupervisors_args(");
- boolean first = true;
-
- sb.append("number:");
- sb.append(this.number);
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class addSupervisors_result implements org.apache.thrift7.TBase<addSupervisors_result, addSupervisors_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("addSupervisors_result");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(addSupervisors_result.class, metaDataMap);
- }
-
- public addSupervisors_result() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public addSupervisors_result(addSupervisors_result other) {
- }
-
- public addSupervisors_result deepCopy() {
- return new addSupervisors_result(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof addSupervisors_result)
- return this.equals((addSupervisors_result)that);
- return false;
- }
-
- public boolean equals(addSupervisors_result that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(addSupervisors_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- addSupervisors_result typedOther = (addSupervisors_result)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("addSupervisors_result(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class startNimbus_args implements org.apache.thrift7.TBase<startNimbus_args, startNimbus_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("startNimbus_args");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(startNimbus_args.class, metaDataMap);
- }
-
- public startNimbus_args() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public startNimbus_args(startNimbus_args other) {
- }
-
- public startNimbus_args deepCopy() {
- return new startNimbus_args(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof startNimbus_args)
- return this.equals((startNimbus_args)that);
- return false;
- }
-
- public boolean equals(startNimbus_args that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(startNimbus_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- startNimbus_args typedOther = (startNimbus_args)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("startNimbus_args(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class startNimbus_result implements org.apache.thrift7.TB
<TRUNCATED>
[35/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/NotAliveException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/NotAliveException.java b/jstorm-client/src/main/java/backtype/storm/generated/NotAliveException.java
deleted file mode 100644
index 30d18ed..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/NotAliveException.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NotAliveException extends Exception implements org.apache.thrift7.TBase<NotAliveException, NotAliveException._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("NotAliveException");
-
- private static final org.apache.thrift7.protocol.TField MSG_FIELD_DESC = new org.apache.thrift7.protocol.TField("msg", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String msg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- MSG((short)1, "msg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // MSG
- return MSG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.MSG, new org.apache.thrift7.meta_data.FieldMetaData("msg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(NotAliveException.class, metaDataMap);
- }
-
- public NotAliveException() {
- }
-
- public NotAliveException(
- String msg)
- {
- this();
- this.msg = msg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public NotAliveException(NotAliveException other) {
- if (other.is_set_msg()) {
- this.msg = other.msg;
- }
- }
-
- public NotAliveException deepCopy() {
- return new NotAliveException(this);
- }
-
- @Override
- public void clear() {
- this.msg = null;
- }
-
- public String get_msg() {
- return this.msg;
- }
-
- public void set_msg(String msg) {
- this.msg = msg;
- }
-
- public void unset_msg() {
- this.msg = null;
- }
-
- /** Returns true if field msg is set (has been assigned a value) and false otherwise */
- public boolean is_set_msg() {
- return this.msg != null;
- }
-
- public void set_msg_isSet(boolean value) {
- if (!value) {
- this.msg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case MSG:
- if (value == null) {
- unset_msg();
- } else {
- set_msg((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case MSG:
- return get_msg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case MSG:
- return is_set_msg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof NotAliveException)
- return this.equals((NotAliveException)that);
- return false;
- }
-
- public boolean equals(NotAliveException that) {
- if (that == null)
- return false;
-
- boolean this_present_msg = true && this.is_set_msg();
- boolean that_present_msg = true && that.is_set_msg();
- if (this_present_msg || that_present_msg) {
- if (!(this_present_msg && that_present_msg))
- return false;
- if (!this.msg.equals(that.msg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_msg = true && (is_set_msg());
- builder.append(present_msg);
- if (present_msg)
- builder.append(msg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(NotAliveException other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- NotAliveException typedOther = (NotAliveException)other;
-
- lastComparison = Boolean.valueOf(is_set_msg()).compareTo(typedOther.is_set_msg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_msg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.msg, typedOther.msg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // MSG
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.msg = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.msg != null) {
- oprot.writeFieldBegin(MSG_FIELD_DESC);
- oprot.writeString(this.msg);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("NotAliveException(");
- boolean first = true;
-
- sb.append("msg:");
- if (this.msg == null) {
- sb.append("null");
- } else {
- sb.append(this.msg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_msg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
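For reference, the NotAliveException removed above is a Thrift-generated checked exception carrying a single required msg field. A minimal sketch of how such a class is typically thrown and caught, assuming only the generated members shown in this diff (the lookupTopology helper and the demo class name are illustrative, not part of Storm):

import backtype.storm.generated.NotAliveException;

public class NotAliveExceptionDemo {
    // Illustrative helper: stands in for a real topology lookup and signals
    // absence with the generated exception and its required msg field.
    static String lookupTopology(String name) throws NotAliveException {
        boolean found = false; // placeholder for a real lookup result
        if (!found) {
            throw new NotAliveException(name + " is not alive");
        }
        return name;
    }

    public static void main(String[] args) {
        try {
            lookupTopology("word-count");
        } catch (NotAliveException e) {
            // get_msg() is the generated accessor for the required 'msg' field
            System.err.println("caught: " + e.get_msg());
        }
    }
}

Because msg is declared REQUIRED in the metadata map, validate() rejects an instance whose msg was never set, which is why the generated write() path calls validate() before serializing.
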
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/NullStruct.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/NullStruct.java b/jstorm-client/src/main/java/backtype/storm/generated/NullStruct.java
deleted file mode 100644
index ce89767..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/NullStruct.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class NullStruct implements org.apache.thrift7.TBase<NullStruct, NullStruct._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("NullStruct");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(NullStruct.class, metaDataMap);
- }
-
- public NullStruct() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public NullStruct(NullStruct other) {
- }
-
- public NullStruct deepCopy() {
- return new NullStruct(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof NullStruct)
- return this.equals((NullStruct)that);
- return false;
- }
-
- public boolean equals(NullStruct that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(NullStruct other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- NullStruct typedOther = (NullStruct)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("NullStruct(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
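NullStruct removed above declares no fields; in the Storm IDL it appears to act as an empty placeholder payload. A trivial sketch of its generated bean behaviour, assuming only the members shown in the deleted file (the demo class name is illustrative):

import backtype.storm.generated.NullStruct;

public class NullStructDemo {
    public static void main(String[] args) {
        NullStruct a = new NullStruct();
        NullStruct b = a.deepCopy();       // copy constructor has nothing to copy
        System.out.println(a.equals(b));   // true: any two instances compare equal
        System.out.println(a);             // "NullStruct()"
    }
}
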
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/RebalanceOptions.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/RebalanceOptions.java b/jstorm-client/src/main/java/backtype/storm/generated/RebalanceOptions.java
deleted file mode 100644
index dde038d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/RebalanceOptions.java
+++ /dev/null
@@ -1,409 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class RebalanceOptions implements org.apache.thrift7.TBase<RebalanceOptions, RebalanceOptions._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("RebalanceOptions");
-
- private static final org.apache.thrift7.protocol.TField WAIT_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("wait_secs", org.apache.thrift7.protocol.TType.I32, (short)1);
- private static final org.apache.thrift7.protocol.TField NUM_WORKERS_FIELD_DESC = new org.apache.thrift7.protocol.TField("num_workers", org.apache.thrift7.protocol.TType.I32, (short)2);
-
- private int wait_secs; // required
- private int num_workers; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- WAIT_SECS((short)1, "wait_secs"),
- NUM_WORKERS((short)2, "num_workers");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // WAIT_SECS
- return WAIT_SECS;
- case 2: // NUM_WORKERS
- return NUM_WORKERS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __WAIT_SECS_ISSET_ID = 0;
- private static final int __NUM_WORKERS_ISSET_ID = 1;
- private BitSet __isset_bit_vector = new BitSet(2);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.WAIT_SECS, new org.apache.thrift7.meta_data.FieldMetaData("wait_secs", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.NUM_WORKERS, new org.apache.thrift7.meta_data.FieldMetaData("num_workers", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(RebalanceOptions.class, metaDataMap);
- }
-
- public RebalanceOptions() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public RebalanceOptions(RebalanceOptions other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.wait_secs = other.wait_secs;
- this.num_workers = other.num_workers;
- }
-
- public RebalanceOptions deepCopy() {
- return new RebalanceOptions(this);
- }
-
- @Override
- public void clear() {
- set_wait_secs_isSet(false);
- this.wait_secs = 0;
- set_num_workers_isSet(false);
- this.num_workers = 0;
- }
-
- public int get_wait_secs() {
- return this.wait_secs;
- }
-
- public void set_wait_secs(int wait_secs) {
- this.wait_secs = wait_secs;
- set_wait_secs_isSet(true);
- }
-
- public void unset_wait_secs() {
- __isset_bit_vector.clear(__WAIT_SECS_ISSET_ID);
- }
-
- /** Returns true if field wait_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_wait_secs() {
- return __isset_bit_vector.get(__WAIT_SECS_ISSET_ID);
- }
-
- public void set_wait_secs_isSet(boolean value) {
- __isset_bit_vector.set(__WAIT_SECS_ISSET_ID, value);
- }
-
- public int get_num_workers() {
- return this.num_workers;
- }
-
- public void set_num_workers(int num_workers) {
- this.num_workers = num_workers;
- set_num_workers_isSet(true);
- }
-
- public void unset_num_workers() {
- __isset_bit_vector.clear(__NUM_WORKERS_ISSET_ID);
- }
-
- /** Returns true if field num_workers is set (has been assigned a value) and false otherwise */
- public boolean is_set_num_workers() {
- return __isset_bit_vector.get(__NUM_WORKERS_ISSET_ID);
- }
-
- public void set_num_workers_isSet(boolean value) {
- __isset_bit_vector.set(__NUM_WORKERS_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case WAIT_SECS:
- if (value == null) {
- unset_wait_secs();
- } else {
- set_wait_secs((Integer)value);
- }
- break;
-
- case NUM_WORKERS:
- if (value == null) {
- unset_num_workers();
- } else {
- set_num_workers((Integer)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case WAIT_SECS:
- return Integer.valueOf(get_wait_secs());
-
- case NUM_WORKERS:
- return Integer.valueOf(get_num_workers());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case WAIT_SECS:
- return is_set_wait_secs();
- case NUM_WORKERS:
- return is_set_num_workers();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof RebalanceOptions)
- return this.equals((RebalanceOptions)that);
- return false;
- }
-
- public boolean equals(RebalanceOptions that) {
- if (that == null)
- return false;
-
- boolean this_present_wait_secs = true && this.is_set_wait_secs();
- boolean that_present_wait_secs = true && that.is_set_wait_secs();
- if (this_present_wait_secs || that_present_wait_secs) {
- if (!(this_present_wait_secs && that_present_wait_secs))
- return false;
- if (this.wait_secs != that.wait_secs)
- return false;
- }
-
- boolean this_present_num_workers = true && this.is_set_num_workers();
- boolean that_present_num_workers = true && that.is_set_num_workers();
- if (this_present_num_workers || that_present_num_workers) {
- if (!(this_present_num_workers && that_present_num_workers))
- return false;
- if (this.num_workers != that.num_workers)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_wait_secs = true && (is_set_wait_secs());
- builder.append(present_wait_secs);
- if (present_wait_secs)
- builder.append(wait_secs);
-
- boolean present_num_workers = true && (is_set_num_workers());
- builder.append(present_num_workers);
- if (present_num_workers)
- builder.append(num_workers);
-
- return builder.toHashCode();
- }
-
- public int compareTo(RebalanceOptions other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- RebalanceOptions typedOther = (RebalanceOptions)other;
-
- lastComparison = Boolean.valueOf(is_set_wait_secs()).compareTo(typedOther.is_set_wait_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_wait_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.wait_secs, typedOther.wait_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_num_workers()).compareTo(typedOther.is_set_num_workers());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_num_workers()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.num_workers, typedOther.num_workers);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // WAIT_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.wait_secs = iprot.readI32();
- set_wait_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // NUM_WORKERS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.num_workers = iprot.readI32();
- set_num_workers_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (is_set_wait_secs()) {
- oprot.writeFieldBegin(WAIT_SECS_FIELD_DESC);
- oprot.writeI32(this.wait_secs);
- oprot.writeFieldEnd();
- }
- if (is_set_num_workers()) {
- oprot.writeFieldBegin(NUM_WORKERS_FIELD_DESC);
- oprot.writeI32(this.num_workers);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("RebalanceOptions(");
- boolean first = true;
-
- if (is_set_wait_secs()) {
- sb.append("wait_secs:");
- sb.append(this.wait_secs);
- first = false;
- }
- if (is_set_num_workers()) {
- if (!first) sb.append(", ");
- sb.append("num_workers:");
- sb.append(this.num_workers);
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
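RebalanceOptions removed above carries two optional i32 fields whose presence is tracked in an isset bit vector, so write() only emits a field after its setter has been called. A minimal sketch using only members visible in the deleted file (the demo class name is illustrative):

import backtype.storm.generated.RebalanceOptions;

public class RebalanceOptionsDemo {
    public static void main(String[] args) {
        RebalanceOptions opts = new RebalanceOptions();
        System.out.println(opts.is_set_num_workers()); // false: nothing set yet

        opts.set_wait_secs(30);      // also flips the isset bit for wait_secs
        opts.set_num_workers(4);

        System.out.println(opts.is_set_wait_secs());   // true
        System.out.println(opts);                      // RebalanceOptions(wait_secs:30, num_workers:4)

        opts.unset_num_workers();                      // clears the isset bit; write() would now skip the field
        System.out.println(opts.is_set_num_workers()); // false
    }
}

The isset bit vector is what lets a primitive int distinguish "0" from "never set", which is also why the deserialization hook recreates the BitSet before calling read().
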
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ShellComponent.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ShellComponent.java b/jstorm-client/src/main/java/backtype/storm/generated/ShellComponent.java
deleted file mode 100644
index 58fef88..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ShellComponent.java
+++ /dev/null
@@ -1,417 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ShellComponent implements org.apache.thrift7.TBase<ShellComponent, ShellComponent._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ShellComponent");
-
- private static final org.apache.thrift7.protocol.TField EXECUTION_COMMAND_FIELD_DESC = new org.apache.thrift7.protocol.TField("execution_command", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField SCRIPT_FIELD_DESC = new org.apache.thrift7.protocol.TField("script", org.apache.thrift7.protocol.TType.STRING, (short)2);
-
- private String execution_command; // required
- private String script; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- EXECUTION_COMMAND((short)1, "execution_command"),
- SCRIPT((short)2, "script");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // EXECUTION_COMMAND
- return EXECUTION_COMMAND;
- case 2: // SCRIPT
- return SCRIPT;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.EXECUTION_COMMAND, new org.apache.thrift7.meta_data.FieldMetaData("execution_command", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.SCRIPT, new org.apache.thrift7.meta_data.FieldMetaData("script", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ShellComponent.class, metaDataMap);
- }
-
- public ShellComponent() {
- }
-
- public ShellComponent(
- String execution_command,
- String script)
- {
- this();
- this.execution_command = execution_command;
- this.script = script;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public ShellComponent(ShellComponent other) {
- if (other.is_set_execution_command()) {
- this.execution_command = other.execution_command;
- }
- if (other.is_set_script()) {
- this.script = other.script;
- }
- }
-
- public ShellComponent deepCopy() {
- return new ShellComponent(this);
- }
-
- @Override
- public void clear() {
- this.execution_command = null;
- this.script = null;
- }
-
- public String get_execution_command() {
- return this.execution_command;
- }
-
- public void set_execution_command(String execution_command) {
- this.execution_command = execution_command;
- }
-
- public void unset_execution_command() {
- this.execution_command = null;
- }
-
- /** Returns true if field execution_command is set (has been assigned a value) and false otherwise */
- public boolean is_set_execution_command() {
- return this.execution_command != null;
- }
-
- public void set_execution_command_isSet(boolean value) {
- if (!value) {
- this.execution_command = null;
- }
- }
-
- public String get_script() {
- return this.script;
- }
-
- public void set_script(String script) {
- this.script = script;
- }
-
- public void unset_script() {
- this.script = null;
- }
-
- /** Returns true if field script is set (has been assigned a value) and false otherwise */
- public boolean is_set_script() {
- return this.script != null;
- }
-
- public void set_script_isSet(boolean value) {
- if (!value) {
- this.script = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case EXECUTION_COMMAND:
- if (value == null) {
- unset_execution_command();
- } else {
- set_execution_command((String)value);
- }
- break;
-
- case SCRIPT:
- if (value == null) {
- unset_script();
- } else {
- set_script((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case EXECUTION_COMMAND:
- return get_execution_command();
-
- case SCRIPT:
- return get_script();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case EXECUTION_COMMAND:
- return is_set_execution_command();
- case SCRIPT:
- return is_set_script();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof ShellComponent)
- return this.equals((ShellComponent)that);
- return false;
- }
-
- public boolean equals(ShellComponent that) {
- if (that == null)
- return false;
-
- boolean this_present_execution_command = true && this.is_set_execution_command();
- boolean that_present_execution_command = true && that.is_set_execution_command();
- if (this_present_execution_command || that_present_execution_command) {
- if (!(this_present_execution_command && that_present_execution_command))
- return false;
- if (!this.execution_command.equals(that.execution_command))
- return false;
- }
-
- boolean this_present_script = true && this.is_set_script();
- boolean that_present_script = true && that.is_set_script();
- if (this_present_script || that_present_script) {
- if (!(this_present_script && that_present_script))
- return false;
- if (!this.script.equals(that.script))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_execution_command = true && (is_set_execution_command());
- builder.append(present_execution_command);
- if (present_execution_command)
- builder.append(execution_command);
-
- boolean present_script = true && (is_set_script());
- builder.append(present_script);
- if (present_script)
- builder.append(script);
-
- return builder.toHashCode();
- }
-
- public int compareTo(ShellComponent other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- ShellComponent typedOther = (ShellComponent)other;
-
- lastComparison = Boolean.valueOf(is_set_execution_command()).compareTo(typedOther.is_set_execution_command());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_execution_command()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.execution_command, typedOther.execution_command);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_script()).compareTo(typedOther.is_set_script());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_script()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.script, typedOther.script);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // EXECUTION_COMMAND
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.execution_command = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // SCRIPT
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.script = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.execution_command != null) {
- oprot.writeFieldBegin(EXECUTION_COMMAND_FIELD_DESC);
- oprot.writeString(this.execution_command);
- oprot.writeFieldEnd();
- }
- if (this.script != null) {
- oprot.writeFieldBegin(SCRIPT_FIELD_DESC);
- oprot.writeString(this.script);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("ShellComponent(");
- boolean first = true;
-
- sb.append("execution_command:");
- if (this.execution_command == null) {
- sb.append("null");
- } else {
- sb.append(this.execution_command);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("script:");
- if (this.script == null) {
- sb.append("null");
- } else {
- sb.append(this.script);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
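ShellComponent removed above is a plain two-string bean (execution_command plus script) that Storm uses to describe shell/multilang components. A small sketch exercising the generated constructor and accessors, assuming only what the deleted file defines (the demo class name and the python/splitsentence.py values are illustrative):

import backtype.storm.generated.ShellComponent;

public class ShellComponentDemo {
    public static void main(String[] args) {
        // e.g. a multilang bolt: interpreter command plus the script it should run
        ShellComponent sc = new ShellComponent("python", "splitsentence.py");

        ShellComponent copy = sc.deepCopy();  // copies both string fields
        System.out.println(sc.equals(copy));  // true
        System.out.println(sc.get_execution_command() + " " + sc.get_script());
        System.out.println(sc);               // ShellComponent(execution_command:python, script:splitsentence.py)
    }
}
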
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/SpoutSpec.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/SpoutSpec.java b/jstorm-client/src/main/java/backtype/storm/generated/SpoutSpec.java
deleted file mode 100644
index 60f6d4f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/SpoutSpec.java
+++ /dev/null
@@ -1,427 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SpoutSpec implements org.apache.thrift7.TBase<SpoutSpec, SpoutSpec._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("SpoutSpec");
-
- private static final org.apache.thrift7.protocol.TField SPOUT_OBJECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("spout_object", org.apache.thrift7.protocol.TType.STRUCT, (short)1);
- private static final org.apache.thrift7.protocol.TField COMMON_FIELD_DESC = new org.apache.thrift7.protocol.TField("common", org.apache.thrift7.protocol.TType.STRUCT, (short)2);
-
- private ComponentObject spout_object; // required
- private ComponentCommon common; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SPOUT_OBJECT((short)1, "spout_object"),
- COMMON((short)2, "common");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // SPOUT_OBJECT
- return SPOUT_OBJECT;
- case 2: // COMMON
- return COMMON;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SPOUT_OBJECT, new org.apache.thrift7.meta_data.FieldMetaData("spout_object", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ComponentObject.class)));
- tmpMap.put(_Fields.COMMON, new org.apache.thrift7.meta_data.FieldMetaData("common", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ComponentCommon.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(SpoutSpec.class, metaDataMap);
- }
-
- public SpoutSpec() {
- }
-
- public SpoutSpec(
- ComponentObject spout_object,
- ComponentCommon common)
- {
- this();
- this.spout_object = spout_object;
- this.common = common;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public SpoutSpec(SpoutSpec other) {
- if (other.is_set_spout_object()) {
- this.spout_object = new ComponentObject(other.spout_object);
- }
- if (other.is_set_common()) {
- this.common = new ComponentCommon(other.common);
- }
- }
-
- public SpoutSpec deepCopy() {
- return new SpoutSpec(this);
- }
-
- @Override
- public void clear() {
- this.spout_object = null;
- this.common = null;
- }
-
- public ComponentObject get_spout_object() {
- return this.spout_object;
- }
-
- public void set_spout_object(ComponentObject spout_object) {
- this.spout_object = spout_object;
- }
-
- public void unset_spout_object() {
- this.spout_object = null;
- }
-
- /** Returns true if field spout_object is set (has been assigned a value) and false otherwise */
- public boolean is_set_spout_object() {
- return this.spout_object != null;
- }
-
- public void set_spout_object_isSet(boolean value) {
- if (!value) {
- this.spout_object = null;
- }
- }
-
- public ComponentCommon get_common() {
- return this.common;
- }
-
- public void set_common(ComponentCommon common) {
- this.common = common;
- }
-
- public void unset_common() {
- this.common = null;
- }
-
- /** Returns true if field common is set (has been assigned a value) and false otherwise */
- public boolean is_set_common() {
- return this.common != null;
- }
-
- public void set_common_isSet(boolean value) {
- if (!value) {
- this.common = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SPOUT_OBJECT:
- if (value == null) {
- unset_spout_object();
- } else {
- set_spout_object((ComponentObject)value);
- }
- break;
-
- case COMMON:
- if (value == null) {
- unset_common();
- } else {
- set_common((ComponentCommon)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SPOUT_OBJECT:
- return get_spout_object();
-
- case COMMON:
- return get_common();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SPOUT_OBJECT:
- return is_set_spout_object();
- case COMMON:
- return is_set_common();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof SpoutSpec)
- return this.equals((SpoutSpec)that);
- return false;
- }
-
- public boolean equals(SpoutSpec that) {
- if (that == null)
- return false;
-
- boolean this_present_spout_object = true && this.is_set_spout_object();
- boolean that_present_spout_object = true && that.is_set_spout_object();
- if (this_present_spout_object || that_present_spout_object) {
- if (!(this_present_spout_object && that_present_spout_object))
- return false;
- if (!this.spout_object.equals(that.spout_object))
- return false;
- }
-
- boolean this_present_common = true && this.is_set_common();
- boolean that_present_common = true && that.is_set_common();
- if (this_present_common || that_present_common) {
- if (!(this_present_common && that_present_common))
- return false;
- if (!this.common.equals(that.common))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_spout_object = true && (is_set_spout_object());
- builder.append(present_spout_object);
- if (present_spout_object)
- builder.append(spout_object);
-
- boolean present_common = true && (is_set_common());
- builder.append(present_common);
- if (present_common)
- builder.append(common);
-
- return builder.toHashCode();
- }
-
- public int compareTo(SpoutSpec other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- SpoutSpec typedOther = (SpoutSpec)other;
-
- lastComparison = Boolean.valueOf(is_set_spout_object()).compareTo(typedOther.is_set_spout_object());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_spout_object()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.spout_object, typedOther.spout_object);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_common()).compareTo(typedOther.is_set_common());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_common()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.common, typedOther.common);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // SPOUT_OBJECT
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.spout_object = new ComponentObject();
- this.spout_object.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // COMMON
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.common = new ComponentCommon();
- this.common.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.spout_object != null) {
- oprot.writeFieldBegin(SPOUT_OBJECT_FIELD_DESC);
- this.spout_object.write(oprot);
- oprot.writeFieldEnd();
- }
- if (this.common != null) {
- oprot.writeFieldBegin(COMMON_FIELD_DESC);
- this.common.write(oprot);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("SpoutSpec(");
- boolean first = true;
-
- sb.append("spout_object:");
- if (this.spout_object == null) {
- sb.append("null");
- } else {
- sb.append(this.spout_object);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("common:");
- if (this.common == null) {
- sb.append("null");
- } else {
- sb.append(this.common);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_spout_object()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'spout_object' is unset! Struct:" + toString());
- }
-
- if (!is_set_common()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'common' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
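For context on the SpoutSpec struct whose deletion ends above: it pairs a serialized spout implementation (spout_object) with its ComponentCommon metadata, and both fields are required. A minimal usage sketch, assuming ComponentObject and ComponentCommon instances have already been built elsewhere; the variable names below are illustrative, not taken from the diff:

    // Hypothetical caller code; spoutObject and common are assumed to exist.
    SpoutSpec spec = new SpoutSpec(spoutObject, common);
    // Equivalent field-by-field form using the generated setters:
    SpoutSpec spec2 = new SpoutSpec();
    spec2.set_spout_object(spoutObject);
    spec2.set_common(common);
    spec2.validate(); // throws if either required field is still unset
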
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/SpoutStats.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/SpoutStats.java b/jstorm-client/src/main/java/backtype/storm/generated/SpoutStats.java
deleted file mode 100644
index cad95c1..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/SpoutStats.java
+++ /dev/null
@@ -1,756 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SpoutStats implements org.apache.thrift7.TBase<SpoutStats, SpoutStats._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("SpoutStats");
-
- private static final org.apache.thrift7.protocol.TField ACKED_FIELD_DESC = new org.apache.thrift7.protocol.TField("acked", org.apache.thrift7.protocol.TType.MAP, (short)1);
- private static final org.apache.thrift7.protocol.TField FAILED_FIELD_DESC = new org.apache.thrift7.protocol.TField("failed", org.apache.thrift7.protocol.TType.MAP, (short)2);
- private static final org.apache.thrift7.protocol.TField COMPLETE_MS_AVG_FIELD_DESC = new org.apache.thrift7.protocol.TField("complete_ms_avg", org.apache.thrift7.protocol.TType.MAP, (short)3);
-
- private Map<String,Map<String,Long>> acked; // required
- private Map<String,Map<String,Long>> failed; // required
- private Map<String,Map<String,Double>> complete_ms_avg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ACKED((short)1, "acked"),
- FAILED((short)2, "failed"),
- COMPLETE_MS_AVG((short)3, "complete_ms_avg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ACKED
- return ACKED;
- case 2: // FAILED
- return FAILED;
- case 3: // COMPLETE_MS_AVG
- return COMPLETE_MS_AVG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ACKED, new org.apache.thrift7.meta_data.FieldMetaData("acked", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.FAILED, new org.apache.thrift7.meta_data.FieldMetaData("failed", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.COMPLETE_MS_AVG, new org.apache.thrift7.meta_data.FieldMetaData("complete_ms_avg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(SpoutStats.class, metaDataMap);
- }
-
- public SpoutStats() {
- }
-
- public SpoutStats(
- Map<String,Map<String,Long>> acked,
- Map<String,Map<String,Long>> failed,
- Map<String,Map<String,Double>> complete_ms_avg)
- {
- this();
- this.acked = acked;
- this.failed = failed;
- this.complete_ms_avg = complete_ms_avg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public SpoutStats(SpoutStats other) {
- if (other.is_set_acked()) {
- Map<String,Map<String,Long>> __this__acked = new HashMap<String,Map<String,Long>>();
- for (Map.Entry<String, Map<String,Long>> other_element : other.acked.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<String,Long> other_element_value = other_element.getValue();
-
- String __this__acked_copy_key = other_element_key;
-
- Map<String,Long> __this__acked_copy_value = new HashMap<String,Long>();
- for (Map.Entry<String, Long> other_element_value_element : other_element_value.entrySet()) {
-
- String other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- String __this__acked_copy_value_copy_key = other_element_value_element_key;
-
- Long __this__acked_copy_value_copy_value = other_element_value_element_value;
-
- __this__acked_copy_value.put(__this__acked_copy_value_copy_key, __this__acked_copy_value_copy_value);
- }
-
- __this__acked.put(__this__acked_copy_key, __this__acked_copy_value);
- }
- this.acked = __this__acked;
- }
- if (other.is_set_failed()) {
- Map<String,Map<String,Long>> __this__failed = new HashMap<String,Map<String,Long>>();
- for (Map.Entry<String, Map<String,Long>> other_element : other.failed.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<String,Long> other_element_value = other_element.getValue();
-
- String __this__failed_copy_key = other_element_key;
-
- Map<String,Long> __this__failed_copy_value = new HashMap<String,Long>();
- for (Map.Entry<String, Long> other_element_value_element : other_element_value.entrySet()) {
-
- String other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- String __this__failed_copy_value_copy_key = other_element_value_element_key;
-
- Long __this__failed_copy_value_copy_value = other_element_value_element_value;
-
- __this__failed_copy_value.put(__this__failed_copy_value_copy_key, __this__failed_copy_value_copy_value);
- }
-
- __this__failed.put(__this__failed_copy_key, __this__failed_copy_value);
- }
- this.failed = __this__failed;
- }
- if (other.is_set_complete_ms_avg()) {
- Map<String,Map<String,Double>> __this__complete_ms_avg = new HashMap<String,Map<String,Double>>();
- for (Map.Entry<String, Map<String,Double>> other_element : other.complete_ms_avg.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<String,Double> other_element_value = other_element.getValue();
-
- String __this__complete_ms_avg_copy_key = other_element_key;
-
- Map<String,Double> __this__complete_ms_avg_copy_value = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element_value_element : other_element_value.entrySet()) {
-
- String other_element_value_element_key = other_element_value_element.getKey();
- Double other_element_value_element_value = other_element_value_element.getValue();
-
- String __this__complete_ms_avg_copy_value_copy_key = other_element_value_element_key;
-
- Double __this__complete_ms_avg_copy_value_copy_value = other_element_value_element_value;
-
- __this__complete_ms_avg_copy_value.put(__this__complete_ms_avg_copy_value_copy_key, __this__complete_ms_avg_copy_value_copy_value);
- }
-
- __this__complete_ms_avg.put(__this__complete_ms_avg_copy_key, __this__complete_ms_avg_copy_value);
- }
- this.complete_ms_avg = __this__complete_ms_avg;
- }
- }
-
- public SpoutStats deepCopy() {
- return new SpoutStats(this);
- }
-
- @Override
- public void clear() {
- this.acked = null;
- this.failed = null;
- this.complete_ms_avg = null;
- }
-
- public int get_acked_size() {
- return (this.acked == null) ? 0 : this.acked.size();
- }
-
- public void put_to_acked(String key, Map<String,Long> val) {
- if (this.acked == null) {
- this.acked = new HashMap<String,Map<String,Long>>();
- }
- this.acked.put(key, val);
- }
-
- public Map<String,Map<String,Long>> get_acked() {
- return this.acked;
- }
-
- public void set_acked(Map<String,Map<String,Long>> acked) {
- this.acked = acked;
- }
-
- public void unset_acked() {
- this.acked = null;
- }
-
- /** Returns true if field acked is set (has been assigned a value) and false otherwise */
- public boolean is_set_acked() {
- return this.acked != null;
- }
-
- public void set_acked_isSet(boolean value) {
- if (!value) {
- this.acked = null;
- }
- }
-
- public int get_failed_size() {
- return (this.failed == null) ? 0 : this.failed.size();
- }
-
- public void put_to_failed(String key, Map<String,Long> val) {
- if (this.failed == null) {
- this.failed = new HashMap<String,Map<String,Long>>();
- }
- this.failed.put(key, val);
- }
-
- public Map<String,Map<String,Long>> get_failed() {
- return this.failed;
- }
-
- public void set_failed(Map<String,Map<String,Long>> failed) {
- this.failed = failed;
- }
-
- public void unset_failed() {
- this.failed = null;
- }
-
- /** Returns true if field failed is set (has been assigned a value) and false otherwise */
- public boolean is_set_failed() {
- return this.failed != null;
- }
-
- public void set_failed_isSet(boolean value) {
- if (!value) {
- this.failed = null;
- }
- }
-
- public int get_complete_ms_avg_size() {
- return (this.complete_ms_avg == null) ? 0 : this.complete_ms_avg.size();
- }
-
- public void put_to_complete_ms_avg(String key, Map<String,Double> val) {
- if (this.complete_ms_avg == null) {
- this.complete_ms_avg = new HashMap<String,Map<String,Double>>();
- }
- this.complete_ms_avg.put(key, val);
- }
-
- public Map<String,Map<String,Double>> get_complete_ms_avg() {
- return this.complete_ms_avg;
- }
-
- public void set_complete_ms_avg(Map<String,Map<String,Double>> complete_ms_avg) {
- this.complete_ms_avg = complete_ms_avg;
- }
-
- public void unset_complete_ms_avg() {
- this.complete_ms_avg = null;
- }
-
- /** Returns true if field complete_ms_avg is set (has been assigned a value) and false otherwise */
- public boolean is_set_complete_ms_avg() {
- return this.complete_ms_avg != null;
- }
-
- public void set_complete_ms_avg_isSet(boolean value) {
- if (!value) {
- this.complete_ms_avg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ACKED:
- if (value == null) {
- unset_acked();
- } else {
- set_acked((Map<String,Map<String,Long>>)value);
- }
- break;
-
- case FAILED:
- if (value == null) {
- unset_failed();
- } else {
- set_failed((Map<String,Map<String,Long>>)value);
- }
- break;
-
- case COMPLETE_MS_AVG:
- if (value == null) {
- unset_complete_ms_avg();
- } else {
- set_complete_ms_avg((Map<String,Map<String,Double>>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ACKED:
- return get_acked();
-
- case FAILED:
- return get_failed();
-
- case COMPLETE_MS_AVG:
- return get_complete_ms_avg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ACKED:
- return is_set_acked();
- case FAILED:
- return is_set_failed();
- case COMPLETE_MS_AVG:
- return is_set_complete_ms_avg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof SpoutStats)
- return this.equals((SpoutStats)that);
- return false;
- }
-
- public boolean equals(SpoutStats that) {
- if (that == null)
- return false;
-
- boolean this_present_acked = true && this.is_set_acked();
- boolean that_present_acked = true && that.is_set_acked();
- if (this_present_acked || that_present_acked) {
- if (!(this_present_acked && that_present_acked))
- return false;
- if (!this.acked.equals(that.acked))
- return false;
- }
-
- boolean this_present_failed = true && this.is_set_failed();
- boolean that_present_failed = true && that.is_set_failed();
- if (this_present_failed || that_present_failed) {
- if (!(this_present_failed && that_present_failed))
- return false;
- if (!this.failed.equals(that.failed))
- return false;
- }
-
- boolean this_present_complete_ms_avg = true && this.is_set_complete_ms_avg();
- boolean that_present_complete_ms_avg = true && that.is_set_complete_ms_avg();
- if (this_present_complete_ms_avg || that_present_complete_ms_avg) {
- if (!(this_present_complete_ms_avg && that_present_complete_ms_avg))
- return false;
- if (!this.complete_ms_avg.equals(that.complete_ms_avg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_acked = true && (is_set_acked());
- builder.append(present_acked);
- if (present_acked)
- builder.append(acked);
-
- boolean present_failed = true && (is_set_failed());
- builder.append(present_failed);
- if (present_failed)
- builder.append(failed);
-
- boolean present_complete_ms_avg = true && (is_set_complete_ms_avg());
- builder.append(present_complete_ms_avg);
- if (present_complete_ms_avg)
- builder.append(complete_ms_avg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(SpoutStats other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- SpoutStats typedOther = (SpoutStats)other;
-
- lastComparison = Boolean.valueOf(is_set_acked()).compareTo(typedOther.is_set_acked());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_acked()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.acked, typedOther.acked);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_failed()).compareTo(typedOther.is_set_failed());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_failed()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.failed, typedOther.failed);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_complete_ms_avg()).compareTo(typedOther.is_set_complete_ms_avg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_complete_ms_avg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.complete_ms_avg, typedOther.complete_ms_avg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ACKED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map95 = iprot.readMapBegin();
- this.acked = new HashMap<String,Map<String,Long>>(2*_map95.size);
- for (int _i96 = 0; _i96 < _map95.size; ++_i96)
- {
- String _key97; // required
- Map<String,Long> _val98; // required
- _key97 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map99 = iprot.readMapBegin();
- _val98 = new HashMap<String,Long>(2*_map99.size);
- for (int _i100 = 0; _i100 < _map99.size; ++_i100)
- {
- String _key101; // required
- long _val102; // required
- _key101 = iprot.readString();
- _val102 = iprot.readI64();
- _val98.put(_key101, _val102);
- }
- iprot.readMapEnd();
- }
- this.acked.put(_key97, _val98);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // FAILED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map103 = iprot.readMapBegin();
- this.failed = new HashMap<String,Map<String,Long>>(2*_map103.size);
- for (int _i104 = 0; _i104 < _map103.size; ++_i104)
- {
- String _key105; // required
- Map<String,Long> _val106; // required
- _key105 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map107 = iprot.readMapBegin();
- _val106 = new HashMap<String,Long>(2*_map107.size);
- for (int _i108 = 0; _i108 < _map107.size; ++_i108)
- {
- String _key109; // required
- long _val110; // required
- _key109 = iprot.readString();
- _val110 = iprot.readI64();
- _val106.put(_key109, _val110);
- }
- iprot.readMapEnd();
- }
- this.failed.put(_key105, _val106);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // COMPLETE_MS_AVG
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map111 = iprot.readMapBegin();
- this.complete_ms_avg = new HashMap<String,Map<String,Double>>(2*_map111.size);
- for (int _i112 = 0; _i112 < _map111.size; ++_i112)
- {
- String _key113; // required
- Map<String,Double> _val114; // required
- _key113 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map115 = iprot.readMapBegin();
- _val114 = new HashMap<String,Double>(2*_map115.size);
- for (int _i116 = 0; _i116 < _map115.size; ++_i116)
- {
- String _key117; // required
- double _val118; // required
- _key117 = iprot.readString();
- _val118 = iprot.readDouble();
- _val114.put(_key117, _val118);
- }
- iprot.readMapEnd();
- }
- this.complete_ms_avg.put(_key113, _val114);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.acked != null) {
- oprot.writeFieldBegin(ACKED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.acked.size()));
- for (Map.Entry<String, Map<String,Long>> _iter119 : this.acked.entrySet())
- {
- oprot.writeString(_iter119.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.I64, _iter119.getValue().size()));
- for (Map.Entry<String, Long> _iter120 : _iter119.getValue().entrySet())
- {
- oprot.writeString(_iter120.getKey());
- oprot.writeI64(_iter120.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.failed != null) {
- oprot.writeFieldBegin(FAILED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.failed.size()));
- for (Map.Entry<String, Map<String,Long>> _iter121 : this.failed.entrySet())
- {
- oprot.writeString(_iter121.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.I64, _iter121.getValue().size()));
- for (Map.Entry<String, Long> _iter122 : _iter121.getValue().entrySet())
- {
- oprot.writeString(_iter122.getKey());
- oprot.writeI64(_iter122.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.complete_ms_avg != null) {
- oprot.writeFieldBegin(COMPLETE_MS_AVG_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.complete_ms_avg.size()));
- for (Map.Entry<String, Map<String,Double>> _iter123 : this.complete_ms_avg.entrySet())
- {
- oprot.writeString(_iter123.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, _iter123.getValue().size()));
- for (Map.Entry<String, Double> _iter124 : _iter123.getValue().entrySet())
- {
- oprot.writeString(_iter124.getKey());
- oprot.writeDouble(_iter124.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("SpoutStats(");
- boolean first = true;
-
- sb.append("acked:");
- if (this.acked == null) {
- sb.append("null");
- } else {
- sb.append(this.acked);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("failed:");
- if (this.failed == null) {
- sb.append("null");
- } else {
- sb.append(this.failed);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("complete_ms_avg:");
- if (this.complete_ms_avg == null) {
- sb.append("null");
- } else {
- sb.append(this.complete_ms_avg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_acked()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'acked' is unset! Struct:" + toString());
- }
-
- if (!is_set_failed()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'failed' is unset! Struct:" + toString());
- }
-
- if (!is_set_complete_ms_avg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'complete_ms_avg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
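Before the next message fragment, a brief note on the SpoutStats struct deleted above: its three fields (acked, failed, complete_ms_avg) are all required nested maps; in Storm they are conventionally keyed by time window and then by stream id, which the sketch below assumes. The window key "600" and stream id "default" are illustrative values only, not taken from the diff:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical caller code using only the generated put_to_* helpers shown above.
    SpoutStats stats = new SpoutStats();
    Map<String, Long> ackedByStream = new HashMap<String, Long>();
    ackedByStream.put("default", 42L);
    stats.put_to_acked("600", ackedByStream);
    stats.put_to_failed("600", new HashMap<String, Long>());
    stats.put_to_complete_ms_avg("600", new HashMap<String, Double>());
    stats.validate(); // passes only once acked, failed and complete_ms_avg are all set
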
[40/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPC.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPC.java b/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPC.java
deleted file mode 100644
index c2e3321..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPC.java
+++ /dev/null
@@ -1,964 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class DistributedRPC {
-
- public interface Iface {
-
- public String execute(String functionName, String funcArgs) throws DRPCExecutionException, org.apache.thrift7.TException;
-
- }
-
- public interface AsyncIface {
-
- public void execute(String functionName, String funcArgs, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.execute_call> resultHandler) throws org.apache.thrift7.TException;
-
- }
-
- public static class Client extends org.apache.thrift7.TServiceClient implements Iface {
- public static class Factory implements org.apache.thrift7.TServiceClientFactory<Client> {
- public Factory() {}
- public Client getClient(org.apache.thrift7.protocol.TProtocol prot) {
- return new Client(prot);
- }
- public Client getClient(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- return new Client(iprot, oprot);
- }
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol prot)
- {
- super(prot, prot);
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- super(iprot, oprot);
- }
-
- public String execute(String functionName, String funcArgs) throws DRPCExecutionException, org.apache.thrift7.TException
- {
- send_execute(functionName, funcArgs);
- return recv_execute();
- }
-
- public void send_execute(String functionName, String funcArgs) throws org.apache.thrift7.TException
- {
- execute_args args = new execute_args();
- args.set_functionName(functionName);
- args.set_funcArgs(funcArgs);
- sendBase("execute", args);
- }
-
- public String recv_execute() throws DRPCExecutionException, org.apache.thrift7.TException
- {
- execute_result result = new execute_result();
- receiveBase(result, "execute");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "execute failed: unknown result");
- }
-
- }
- public static class AsyncClient extends org.apache.thrift7.async.TAsyncClient implements AsyncIface {
- public static class Factory implements org.apache.thrift7.async.TAsyncClientFactory<AsyncClient> {
- private org.apache.thrift7.async.TAsyncClientManager clientManager;
- private org.apache.thrift7.protocol.TProtocolFactory protocolFactory;
- public Factory(org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.protocol.TProtocolFactory protocolFactory) {
- this.clientManager = clientManager;
- this.protocolFactory = protocolFactory;
- }
- public AsyncClient getAsyncClient(org.apache.thrift7.transport.TNonblockingTransport transport) {
- return new AsyncClient(protocolFactory, clientManager, transport);
- }
- }
-
- public AsyncClient(org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.transport.TNonblockingTransport transport) {
- super(protocolFactory, clientManager, transport);
- }
-
- public void execute(String functionName, String funcArgs, org.apache.thrift7.async.AsyncMethodCallback<execute_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- execute_call method_call = new execute_call(functionName, funcArgs, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class execute_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String functionName;
- private String funcArgs;
- public execute_call(String functionName, String funcArgs, org.apache.thrift7.async.AsyncMethodCallback<execute_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.functionName = functionName;
- this.funcArgs = funcArgs;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("execute", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- execute_args args = new execute_args();
- args.set_functionName(functionName);
- args.set_funcArgs(funcArgs);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public String getResult() throws DRPCExecutionException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_execute();
- }
- }
-
- }
-
- public static class Processor<I extends Iface> extends org.apache.thrift7.TBaseProcessor implements org.apache.thrift7.TProcessor {
- private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
- public Processor(I iface) {
- super(iface, getProcessMap(new HashMap<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>>()));
- }
-
- protected Processor(I iface, Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- super(iface, getProcessMap(processMap));
- }
-
- private static <I extends Iface> Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> getProcessMap(Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- processMap.put("execute", new execute());
- return processMap;
- }
-
- private static class execute<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, execute_args> {
- public execute() {
- super("execute");
- }
-
- protected execute_args getEmptyArgsInstance() {
- return new execute_args();
- }
-
- protected execute_result getResult(I iface, execute_args args) throws org.apache.thrift7.TException {
- execute_result result = new execute_result();
- try {
- result.success = iface.execute(args.functionName, args.funcArgs);
- } catch (DRPCExecutionException e) {
- result.e = e;
- }
- return result;
- }
- }
-
- }
-
- public static class execute_args implements org.apache.thrift7.TBase<execute_args, execute_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("execute_args");
-
- private static final org.apache.thrift7.protocol.TField FUNCTION_NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("functionName", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField FUNC_ARGS_FIELD_DESC = new org.apache.thrift7.protocol.TField("funcArgs", org.apache.thrift7.protocol.TType.STRING, (short)2);
-
- private String functionName; // required
- private String funcArgs; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- FUNCTION_NAME((short)1, "functionName"),
- FUNC_ARGS((short)2, "funcArgs");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // FUNCTION_NAME
- return FUNCTION_NAME;
- case 2: // FUNC_ARGS
- return FUNC_ARGS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.FUNCTION_NAME, new org.apache.thrift7.meta_data.FieldMetaData("functionName", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.FUNC_ARGS, new org.apache.thrift7.meta_data.FieldMetaData("funcArgs", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(execute_args.class, metaDataMap);
- }
-
- public execute_args() {
- }
-
- public execute_args(
- String functionName,
- String funcArgs)
- {
- this();
- this.functionName = functionName;
- this.funcArgs = funcArgs;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public execute_args(execute_args other) {
- if (other.is_set_functionName()) {
- this.functionName = other.functionName;
- }
- if (other.is_set_funcArgs()) {
- this.funcArgs = other.funcArgs;
- }
- }
-
- public execute_args deepCopy() {
- return new execute_args(this);
- }
-
- @Override
- public void clear() {
- this.functionName = null;
- this.funcArgs = null;
- }
-
- public String get_functionName() {
- return this.functionName;
- }
-
- public void set_functionName(String functionName) {
- this.functionName = functionName;
- }
-
- public void unset_functionName() {
- this.functionName = null;
- }
-
- /** Returns true if field functionName is set (has been assigned a value) and false otherwise */
- public boolean is_set_functionName() {
- return this.functionName != null;
- }
-
- public void set_functionName_isSet(boolean value) {
- if (!value) {
- this.functionName = null;
- }
- }
-
- public String get_funcArgs() {
- return this.funcArgs;
- }
-
- public void set_funcArgs(String funcArgs) {
- this.funcArgs = funcArgs;
- }
-
- public void unset_funcArgs() {
- this.funcArgs = null;
- }
-
- /** Returns true if field funcArgs is set (has been assigned a value) and false otherwise */
- public boolean is_set_funcArgs() {
- return this.funcArgs != null;
- }
-
- public void set_funcArgs_isSet(boolean value) {
- if (!value) {
- this.funcArgs = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case FUNCTION_NAME:
- if (value == null) {
- unset_functionName();
- } else {
- set_functionName((String)value);
- }
- break;
-
- case FUNC_ARGS:
- if (value == null) {
- unset_funcArgs();
- } else {
- set_funcArgs((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case FUNCTION_NAME:
- return get_functionName();
-
- case FUNC_ARGS:
- return get_funcArgs();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case FUNCTION_NAME:
- return is_set_functionName();
- case FUNC_ARGS:
- return is_set_funcArgs();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof execute_args)
- return this.equals((execute_args)that);
- return false;
- }
-
- public boolean equals(execute_args that) {
- if (that == null)
- return false;
-
- boolean this_present_functionName = true && this.is_set_functionName();
- boolean that_present_functionName = true && that.is_set_functionName();
- if (this_present_functionName || that_present_functionName) {
- if (!(this_present_functionName && that_present_functionName))
- return false;
- if (!this.functionName.equals(that.functionName))
- return false;
- }
-
- boolean this_present_funcArgs = true && this.is_set_funcArgs();
- boolean that_present_funcArgs = true && that.is_set_funcArgs();
- if (this_present_funcArgs || that_present_funcArgs) {
- if (!(this_present_funcArgs && that_present_funcArgs))
- return false;
- if (!this.funcArgs.equals(that.funcArgs))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_functionName = true && (is_set_functionName());
- builder.append(present_functionName);
- if (present_functionName)
- builder.append(functionName);
-
- boolean present_funcArgs = true && (is_set_funcArgs());
- builder.append(present_funcArgs);
- if (present_funcArgs)
- builder.append(funcArgs);
-
- return builder.toHashCode();
- }
-
- public int compareTo(execute_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- execute_args typedOther = (execute_args)other;
-
- lastComparison = Boolean.valueOf(is_set_functionName()).compareTo(typedOther.is_set_functionName());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_functionName()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.functionName, typedOther.functionName);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_funcArgs()).compareTo(typedOther.is_set_funcArgs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_funcArgs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.funcArgs, typedOther.funcArgs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // FUNCTION_NAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.functionName = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // FUNC_ARGS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.funcArgs = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.functionName != null) {
- oprot.writeFieldBegin(FUNCTION_NAME_FIELD_DESC);
- oprot.writeString(this.functionName);
- oprot.writeFieldEnd();
- }
- if (this.funcArgs != null) {
- oprot.writeFieldBegin(FUNC_ARGS_FIELD_DESC);
- oprot.writeString(this.funcArgs);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("execute_args(");
- boolean first = true;
-
- sb.append("functionName:");
- if (this.functionName == null) {
- sb.append("null");
- } else {
- sb.append(this.functionName);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("funcArgs:");
- if (this.funcArgs == null) {
- sb.append("null");
- } else {
- sb.append(this.funcArgs);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class execute_result implements org.apache.thrift7.TBase<execute_result, execute_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("execute_result");
-
- private static final org.apache.thrift7.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift7.protocol.TField("success", org.apache.thrift7.protocol.TType.STRING, (short)0);
- private static final org.apache.thrift7.protocol.TField E_FIELD_DESC = new org.apache.thrift7.protocol.TField("e", org.apache.thrift7.protocol.TType.STRUCT, (short)1);
-
- private String success; // required
- private DRPCExecutionException e; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SUCCESS((short)0, "success"),
- E((short)1, "e");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 0: // SUCCESS
- return SUCCESS;
- case 1: // E
- return E;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SUCCESS, new org.apache.thrift7.meta_data.FieldMetaData("success", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.E, new org.apache.thrift7.meta_data.FieldMetaData("e", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRUCT)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(execute_result.class, metaDataMap);
- }
-
- public execute_result() {
- }
-
- public execute_result(
- String success,
- DRPCExecutionException e)
- {
- this();
- this.success = success;
- this.e = e;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public execute_result(execute_result other) {
- if (other.is_set_success()) {
- this.success = other.success;
- }
- if (other.is_set_e()) {
- this.e = new DRPCExecutionException(other.e);
- }
- }
-
- public execute_result deepCopy() {
- return new execute_result(this);
- }
-
- @Override
- public void clear() {
- this.success = null;
- this.e = null;
- }
-
- public String get_success() {
- return this.success;
- }
-
- public void set_success(String success) {
- this.success = success;
- }
-
- public void unset_success() {
- this.success = null;
- }
-
- /** Returns true if field success is set (has been assigned a value) and false otherwise */
- public boolean is_set_success() {
- return this.success != null;
- }
-
- public void set_success_isSet(boolean value) {
- if (!value) {
- this.success = null;
- }
- }
-
- public DRPCExecutionException get_e() {
- return this.e;
- }
-
- public void set_e(DRPCExecutionException e) {
- this.e = e;
- }
-
- public void unset_e() {
- this.e = null;
- }
-
- /** Returns true if field e is set (has been assigned a value) and false otherwise */
- public boolean is_set_e() {
- return this.e != null;
- }
-
- public void set_e_isSet(boolean value) {
- if (!value) {
- this.e = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SUCCESS:
- if (value == null) {
- unset_success();
- } else {
- set_success((String)value);
- }
- break;
-
- case E:
- if (value == null) {
- unset_e();
- } else {
- set_e((DRPCExecutionException)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SUCCESS:
- return get_success();
-
- case E:
- return get_e();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SUCCESS:
- return is_set_success();
- case E:
- return is_set_e();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof execute_result)
- return this.equals((execute_result)that);
- return false;
- }
-
- public boolean equals(execute_result that) {
- if (that == null)
- return false;
-
- boolean this_present_success = true && this.is_set_success();
- boolean that_present_success = true && that.is_set_success();
- if (this_present_success || that_present_success) {
- if (!(this_present_success && that_present_success))
- return false;
- if (!this.success.equals(that.success))
- return false;
- }
-
- boolean this_present_e = true && this.is_set_e();
- boolean that_present_e = true && that.is_set_e();
- if (this_present_e || that_present_e) {
- if (!(this_present_e && that_present_e))
- return false;
- if (!this.e.equals(that.e))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_success = true && (is_set_success());
- builder.append(present_success);
- if (present_success)
- builder.append(success);
-
- boolean present_e = true && (is_set_e());
- builder.append(present_e);
- if (present_e)
- builder.append(e);
-
- return builder.toHashCode();
- }
-
- public int compareTo(execute_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- execute_result typedOther = (execute_result)other;
-
- lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_success()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.success, typedOther.success);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_e()).compareTo(typedOther.is_set_e());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_e()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.e, typedOther.e);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 0: // SUCCESS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.success = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 1: // E
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.e = new DRPCExecutionException();
- this.e.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- if (this.is_set_success()) {
- oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
- oprot.writeString(this.success);
- oprot.writeFieldEnd();
- } else if (this.is_set_e()) {
- oprot.writeFieldBegin(E_FIELD_DESC);
- this.e.write(oprot);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("execute_result(");
- boolean first = true;
-
- sb.append("success:");
- if (this.success == null) {
- sb.append("null");
- } else {
- sb.append(this.success);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("e:");
- if (this.e == null) {
- sb.append("null");
- } else {
- sb.append(this.e);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
-}
[55/60] [abbrv] storm git commit: removed jstorm-on-yarn subdirectory
Posted by pt...@apache.org.
removed jstorm-on-yarn subdirectory
Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/e1f68448
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/e1f68448
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/e1f68448
Branch: refs/heads/jstorm-import
Commit: e1f684486d1ddd5835b1168ebb65f382ecbca5b2
Parents: e935da9
Author: P. Taylor Goetz <pt...@gmail.com>
Authored: Thu Nov 5 15:21:46 2015 -0500
Committer: P. Taylor Goetz <pt...@gmail.com>
Committed: Thu Nov 5 15:21:46 2015 -0500
----------------------------------------------------------------------
jstorm-on-yarn/README.md | 1 -
jstorm-on-yarn/bin/jstorm-yarn | 37 -
jstorm-on-yarn/bin/read-link | 27 -
jstorm-on-yarn/pom.xml | 229 -
.../java/com/alibaba/jstorm/yarn/Client.java | 134 -
.../java/com/alibaba/jstorm/yarn/Config.java | 73 -
.../com/alibaba/jstorm/yarn/JStormOnYarn.java | 335 --
.../com/alibaba/jstorm/yarn/LaunchCommand.java | 93 -
.../com/alibaba/jstorm/yarn/MasterClient.java | 50 -
.../com/alibaba/jstorm/yarn/MasterServer.java | 255 -
.../alibaba/jstorm/yarn/StormAMRMClient.java | 214 -
.../alibaba/jstorm/yarn/StormMasterCommand.java | 191 -
.../jstorm/yarn/StormMasterServerHandler.java | 220 -
.../main/java/com/alibaba/jstorm/yarn/Util.java | 399 --
.../java/com/alibaba/jstorm/yarn/Version.java | 29 -
.../com/alibaba/jstorm/yarn/VersionCommand.java | 35 -
.../jstorm/yarn/generated/StormMaster.java | 5113 ------------------
.../alibaba/jstorm/yarn/thrift/AuthUtils.java | 80 -
.../jstorm/yarn/thrift/ITransportPlugin.java | 38 -
.../alibaba/jstorm/yarn/thrift/ReqContext.java | 91 -
.../yarn/thrift/SimpleTransportPlugin.java | 107 -
.../jstorm/yarn/thrift/ThriftClient.java | 62 -
.../jstorm/yarn/thrift/ThriftServer.java | 71 -
jstorm-on-yarn/src/main/resources/logback.xml | 46 -
.../src/main/resources/master_defaults.yaml | 42 -
.../java/com/taobao/jstorm/yarn/AppTest.java | 38 -
26 files changed, 8010 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/README.md
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/README.md b/jstorm-on-yarn/README.md
deleted file mode 100644
index 51ba576..0000000
--- a/jstorm-on-yarn/README.md
+++ /dev/null
@@ -1 +0,0 @@
-Hello JStorm-on-YARN
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/bin/jstorm-yarn
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/bin/jstorm-yarn b/jstorm-on-yarn/bin/jstorm-yarn
deleted file mode 100644
index 4a078c8..0000000
--- a/jstorm-on-yarn/bin/jstorm-yarn
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-readonly JSTORM_ON_YARN_BIN="$(dirname "$(read-link "$0")")"
-readonly MASTER_JAR="$(ls "$JSTORM_ON_YARN_BIN"/../jstorm-yarn-*.jar "$JSTORM_ON_YARN_BIN"/../target/jstorm-yarn-*.jar 2> /dev/null | head -1)"
-
-if [ `command -v jstorm` ]; then
- readonly JSTORM_BIN="$(dirname "$(read-link "$(which jstorm)")")"
- JSTORM_CLASSPATH="$(jstorm classpath)"
-else
- echo "jstorm is not installed" >&2
- exit 1
-fi
-
-if [ -n "${JAVA_HOME}" ]; then
- RUNNER="${JAVA_HOME}/bin/java"
-else
- if [ `command -v java` ]; then
- RUNNER="java"
- else
- echo "JAVA_HOME is not set" >&2
- exit 1
- fi
-fi
-
-if [ `command -v yarn` ]; then
- YARN_CLASSPATH="$(yarn classpath)"
-else
- echo "yarn is not installed" >&2
- exit 1
-fi
-
-CLASSPATH="$JSTORM_YARN_CONF_DIR:$MASTER_JAR:$YARN_CLASSPATH:$JSTORM_CLASSPATH:$HOME/.jstorm"
-
-#echo "$RUNNER" -cp "$CLASSPATH" -Djstorm.home="$JSTORM_BIN"/.. com.alibaba.jstorm.yarn.Client "$@"
-#exec "$RUNNER" -Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000 -cp "$CLASSPATH" -Djstorm.home="$JSTORM_BIN"/.. com.alibaba.jstorm.yarn.Client "$@"
-exec "$RUNNER" -cp "$CLASSPATH" -Djstorm.home="$JSTORM_BIN"/.. com.alibaba.jstorm.yarn.Client "$@"
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/bin/read-link
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/bin/read-link b/jstorm-on-yarn/bin/read-link
deleted file mode 100644
index 11d0283..0000000
--- a/jstorm-on-yarn/bin/read-link
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/sh
-# Copyright (c) 2013 Yahoo! Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License. See accompanying LICENSE file.
-#
-
-cd `dirname $1`
-TARGET_FILE=`basename $1`
-
-while [ -L "$TARGET_FILE" ]
-do
- TARGET_FILE=`readlink $TARGET_FILE`
- cd `dirname $TARGET_FILE`
- TARGET_FILE=`basename $TARGET_FILE`
-done
-
-echo "$(pwd -P)/$TARGET_FILE"
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/pom.xml b/jstorm-on-yarn/pom.xml
deleted file mode 100644
index 22891df..0000000
--- a/jstorm-on-yarn/pom.xml
+++ /dev/null
@@ -1,229 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <modelVersion>4.0.0</modelVersion>
-
- <groupId>com.alibaba.jstorm.yarn</groupId>
- <artifactId>jstorm-yarn</artifactId>
- <version>1.0-SNAPSHOT</version>
- <packaging>jar</packaging>
-
- <name>jstorm-yarn</name>
- <url>http://maven.apache.org</url>
-
- <repositories>
- <repository>
- <id>twitter4j</id>
- <url>http://twitter4j.org/maven2</url>
- </repository>
- <repository>
- <id>central</id>
- <url>http://repo1.maven.org/maven2/</url>
- </repository>
- <repository>
- <id>clojars</id>
- <url>https://clojars.org/repo/</url>
- </repository>
- <repository>
- <id>apache.snapshots</id>
- <url>http://repository.apache.org/content/repositories/snapshots/</url>
- </repository>
- <!--repository>
- <id>hortonworks</id>
- <url>http://repo.hortonworks.com/content/repositories/releases/</url>
- </repository-->
-
- <!-- Special: following this instruction alternative 3
- http://slf4j.org/faq.html#excludingJCL -->
- <repository>
- <id>version99</id>
- <!-- highly available repository serving empty artifacts -->
- <url>http://version99.qos.ch/</url>
- </repository>
- </repositories>
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <jstorm.version>0.9.0-wip21</jstorm.version>
- <hadoop.version>2.2.0</hadoop.version>
- </properties>
-
- <dependencies>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>jcl-over-slf4j</artifactId>
- <version>1.7.2</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>2.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-tests</artifactId>
- <version>${hadoop.version}</version>
- <type>test-jar</type>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-common</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-mapreduce-client-core</artifactId>
- <version>${hadoop.version}</version>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-client</artifactId>
- <version>${hadoop.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.apache.zookeeper</groupId>
- <artifactId>zookeeper</artifactId>
- <version>3.3.3</version>
- <scope>test</scope>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.mockito</groupId>
- <artifactId>mockito-all</artifactId>
- <version>1.9.5</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.yaml</groupId>
- <artifactId>snakeyaml</artifactId>
- <version>1.13</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>0.9.3.1</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
- <version>0.9.3.1</version>
- <scope>provided</scope>
- </dependency>
-
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-server</artifactId>
- <version>0.9.3.1</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>storm</groupId>
- <artifactId>libthrift7</artifactId>
- <version>0.7.0</version>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>org.apache.thrift</groupId>
- <artifactId>libthrift</artifactId>
- <version>0.6.1</version>
- </dependency>
-
-
-
-
- <dependency>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- <version>1.1</version>
- </dependency>
-
- <dependency>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- <version>1.0.0</version>
- </dependency>
-
-
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-jar-plugin</artifactId>
- <version>2.4</version>
- <executions>
- <execution>
- <goals>
- <goal>jar</goal>
- </goals>
- <!-- strictly speaking, the unit test is really a regression test.
- It needs the main jar to be available to be able to run. -->
- <phase>test-compile</phase>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>3.0</version>
- <configuration>
- <fork>true</fork>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-surefire-plugin</artifactId>
- <version>2.10</version>
- <configuration>
- <additionalClasspathElements>
- <additionalClasspathElement>conf</additionalClasspathElement>
- </additionalClasspathElements>
- </configuration>
- </plugin>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- <archive>
- <manifest>
- <mainClass>storm.starter.SequenceTopology</mainClass>
- </manifest>
- </archive>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Client.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Client.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Client.java
deleted file mode 100644
index 37f3674..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Client.java
+++ /dev/null
@@ -1,134 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Options;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Client {
-
- private static final Logger LOG = LoggerFactory.getLogger(Client.class);
-
- public static interface ClientCommand {
-
- /**
- * @return the options this client will process.
- */
- public Options getOpts();
-
- /**
- * @return header description for this command
- */
- public String getHeaderDescription();
-
- /**
- * Do the processing
- * @param cl the arguments to process
- * @param stormConf the storm configuration to use
- * @throws Exception on any error
- */
- public void process(CommandLine cl) throws Exception;
- }
-
- public static class HelpCommand implements ClientCommand {
- HashMap<String, ClientCommand> _commands;
- public HelpCommand(HashMap<String, ClientCommand> commands) {
- _commands = commands;
- }
-
- @Override
- public Options getOpts() {
- return new Options();
- }
-
- @Override
- public String getHeaderDescription() {
- return "jstorm-yarn help";
- }
-
- @SuppressWarnings("unchecked")
- @Override
- public void process(CommandLine cl) throws Exception {
- printHelpFor(cl.getArgList());
- }
-
- public void printHelpFor(Collection<String> args) {
- if(args == null || args.size() < 1) {
- args = _commands.keySet();
- }
- HelpFormatter f = new HelpFormatter();
- for(String command: args) {
- ClientCommand c = _commands.get(command);
- if (c != null) {
- //TODO Show any arguments to the commands.
- f.printHelp(command, c.getHeaderDescription(), c.getOpts(), null);
- } else {
- System.err.println("ERROR: " + c + " is not a supported command.");
- //TODO make this exit with an error at some point
- }
- }
- }
- }
-
- /**
- * @param args the command line arguments
- * @throws Exception
- */
- @SuppressWarnings("rawtypes")
- public void execute(String[] args) throws Exception {
- HashMap<String, ClientCommand> commands = new HashMap<String, ClientCommand>();
- HelpCommand help = new HelpCommand(commands);
- commands.put("help", help);
- commands.put("launch", new LaunchCommand());
- commands.put("setStormConfig", new StormMasterCommand(StormMasterCommand.COMMAND.SET_STORM_CONFIG));
- commands.put("getStormConfig", new StormMasterCommand(StormMasterCommand.COMMAND.GET_STORM_CONFIG));
- commands.put("addSupervisors", new StormMasterCommand(StormMasterCommand.COMMAND.ADD_SUPERVISORS));
- commands.put("startNimbus", new StormMasterCommand(StormMasterCommand.COMMAND.START_NIMBUS));
- commands.put("stopNimbus", new StormMasterCommand(StormMasterCommand.COMMAND.STOP_NIMBUS));
- commands.put("startUI", new StormMasterCommand(StormMasterCommand.COMMAND.START_UI));
- commands.put("stopUI", new StormMasterCommand(StormMasterCommand.COMMAND.STOP_UI));
- commands.put("startSupervisors", new StormMasterCommand(StormMasterCommand.COMMAND.START_SUPERVISORS));
- commands.put("stopSupervisors", new StormMasterCommand(StormMasterCommand.COMMAND.STOP_SUPERVISORS));
- commands.put("shutdown", new StormMasterCommand(StormMasterCommand.COMMAND.SHUTDOWN));
- commands.put("version", new VersionCommand());
-
- String commandName = null;
- String[] commandArgs = null;
- if (args.length < 1) {
- commandName = "help";
- commandArgs = new String[0];
- } else {
- commandName = args[0];
- commandArgs = Arrays.copyOfRange(args, 1, args.length);
- }
- ClientCommand command = commands.get(commandName);
- if(command == null) {
- LOG.error("ERROR: " + commandName + " is not a supported command.");
- help.printHelpFor(null);
- System.exit(1);
- }
- Options opts = command.getOpts();
- if(!opts.hasOption("h")) {
- opts.addOption("h", "help", false, "print out a help message");
- }
- CommandLine cl = new GnuParser().parse(command.getOpts(), commandArgs);
- if(cl.hasOption("help")) {
- help.printHelpFor(Arrays.asList(commandName));
- } else {
-
- command.process(cl);
- }
- }
-
- public static void main(String[] args) throws Exception {
- Client client = new Client();
- client.execute(args);
- }
-
-}
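
For illustration: the removed Client wires every CLI verb through its nested ClientCommand interface (getOpts, getHeaderDescription, process), so adding a verb only means registering one more implementation in the commands map. A minimal, hypothetical sketch following that contract; the class name and option below are invented and never existed in jstorm-on-yarn:

    package com.alibaba.jstorm.yarn;

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.Options;

    // Hypothetical example only; it merely follows the ClientCommand contract shown above.
    public class ListSupervisorsCommand implements Client.ClientCommand {

        @Override
        public Options getOpts() {
            Options opts = new Options();
            opts.addOption("verbose", false, "print one line per supervisor container");
            return opts;
        }

        @Override
        public String getHeaderDescription() {
            return "jstorm-yarn listSupervisors";
        }

        @Override
        public void process(CommandLine cl) throws Exception {
            // A real command would attach to the application master via MasterClient
            // (shown later in this commit) and query it; this sketch only shows the plumbing.
            System.out.println("verbose=" + cl.hasOption("verbose"));
        }
    }

Such a command would be registered next to the existing verbs with commands.put("listSupervisors", new ListSupervisorsCommand()).
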
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Config.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Config.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Config.java
deleted file mode 100644
index 3bd7f15..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Config.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.Map;
-import backtype.storm.utils.Utils;
-
-import org.yaml.snakeyaml.Yaml;
-
-
-public class Config {
-
- final public static String MASTER_DEFAULTS_CONFIG = "master_defaults.yaml";
- final public static String MASTER_CONFIG = "master.yaml";
- final public static String MASTER_HOST = "master.host";
- final public static String MASTER_THRIFT_PORT = "master.thrift.port";
- final public static String MASTER_TIMEOUT_SECS = "master.timeout.secs";
- final public static String MASTER_SIZE_MB = "master.container.size-mb";
- final public static String MASTER_NUM_SUPERVISORS = "master.initial-num-supervisors";
- final public static String MASTER_CONTAINER_PRIORITY = "master.container.priority";
- //# of milliseconds to wait for YARN report on Storm Master host/port
- final public static String YARN_REPORT_WAIT_MILLIS = "yarn.report.wait.millis";
- final public static String MASTER_HEARTBEAT_INTERVAL_MILLIS = "master.heartbeat.interval.millis";
-
- @SuppressWarnings("rawtypes")
- static public Map readJStormConfig() {
- return readJStormConfig(null);
- }
-
- @SuppressWarnings({ "rawtypes", "unchecked" })
- static Map readJStormConfig(String jstormYarnConfigPath) {
- //default configurations
- Map ret = Utils.readDefaultConfig();
- Map conf = Utils.findAndReadConfigFile(Config.MASTER_DEFAULTS_CONFIG);
- ret.putAll(conf);
- System.out.println("default ret:" + ret);
-
- //standard storm configuration
- String confFile = System.getProperty("jstorm.conf.file");
-// String confFile = Util.getJStormHome() + "/conf/storm.yaml";
- Map storm_conf;
- if (confFile==null || confFile.equals("")) {
- storm_conf = Utils.findAndReadConfigFile("storm.yaml", false);
- } else {
- storm_conf = Utils.findAndReadConfigFile(confFile, true);
- }
- System.out.println("storm_conf:" + storm_conf);
- ret.putAll(storm_conf);
-
- //configuration file per command parameter
- if (jstormYarnConfigPath == null) {
- Map master_conf = Utils.findAndReadConfigFile(Config.MASTER_CONFIG, false);
- ret.putAll(master_conf);
- }
- else {
- try {
- Yaml yaml = new Yaml();
- FileInputStream is = new FileInputStream(jstormYarnConfigPath);
- Map storm_yarn_config = (Map) yaml.load(is);
- if(storm_yarn_config!=null)
- ret.putAll(storm_yarn_config);
- is.close();
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- //other configuration settings via CLI opts per system property: storm.options
- ret.putAll(Utils.readCommandLineOpts());
-
- return ret;
- }
-}
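
The removed readJStormConfig layers its sources in a fixed order (built-in defaults, master_defaults.yaml, storm.yaml or jstorm.conf.file, master.yaml or an explicit path, and finally storm.options), so later sources win. A small usage sketch, hypothetical but limited to the constants and the backtype.storm.utils.Utils helpers already referenced in this diff; it assumes master.host and master.thrift.port have been written into the config (the master fills them in once it is running):

    package com.alibaba.jstorm.yarn;

    import java.util.Map;
    import backtype.storm.utils.Utils;

    public class ConfigUsageSketch {
        @SuppressWarnings("rawtypes")
        public static void main(String[] args) {
            // Merged view: later layers (e.g. storm.options) override earlier ones.
            Map conf = Config.readJStormConfig();
            String masterHost = (String) conf.get(Config.MASTER_HOST);
            int thriftPort = Utils.getInt(conf.get(Config.MASTER_THRIFT_PORT));
            System.out.println("master expected at " + masterHost + ":" + thriftPort);
        }
    }
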
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/JStormOnYarn.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/JStormOnYarn.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/JStormOnYarn.java
deleted file mode 100644
index 3d76c91..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/JStormOnYarn.java
+++ /dev/null
@@ -1,335 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.net.URL;
-import java.net.URLDecoder;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Vector;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.mapreduce.security.TokenCache;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
-import org.apache.hadoop.yarn.api.protocolrecords.GetNewApplicationResponse;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
-import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
-import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.client.api.YarnClient;
-import org.apache.hadoop.yarn.client.api.YarnClientApplication;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.exceptions.YarnException;
-import org.apache.hadoop.yarn.util.Apps;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.util.Records;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.yarn.generated.StormMaster.Client;
-
-public class JStormOnYarn {
-
- private static final Logger LOG = LoggerFactory.getLogger(JStormOnYarn.class);
-
- private YarnClient _yarn;
- private YarnConfiguration _hadoopConf;
- private ApplicationId _appId;
- @SuppressWarnings("rawtypes")
- private Map _jstormConf;
- private MasterClient _client = null;
-
- private JStormOnYarn(@SuppressWarnings("rawtypes") Map stormConf) {
- this(null, stormConf);
- }
-
- private JStormOnYarn(ApplicationId appId, @SuppressWarnings("rawtypes") Map stormConf) {
- _hadoopConf = new YarnConfiguration();
- _yarn = YarnClient.createYarnClient();
- _jstormConf = stormConf;
- _appId = appId;
- _yarn.init(_hadoopConf);
- _yarn.start();
- }
-
-
- public static JStormOnYarn launchApplication(String appName, String queue,
- Integer amSize, Map stormConf, String storm_zip_location) throws Exception {
- JStormOnYarn storm = new JStormOnYarn(stormConf);
- storm.launchApp(appName, queue, amSize, storm_zip_location);
- List<String> test = new ArrayList<String>();
- test.add("abc");
- test.add("def");
- LOG.info("test List:" + test);
- return storm;
- }
-
- @SuppressWarnings({ "unchecked", "deprecation" })
- private void launchApp(String appName, String queue, Integer amMB,
- String storm_zip_location) throws Exception {
- LOG.debug("JStormOnYarn:launchApp() ...");
- YarnClientApplication client_app = _yarn.createApplication();
- GetNewApplicationResponse app = client_app.getNewApplicationResponse();
- _appId = app.getApplicationId();
- LOG.debug("_appId:"+_appId);
-
- if(amMB > app.getMaximumResourceCapability().getMemory()) {
- //TODO need some sanity checks
- amMB = app.getMaximumResourceCapability().getMemory();
- }
- ApplicationSubmissionContext appContext =
- Records.newRecord(ApplicationSubmissionContext.class);
- appContext.setApplicationId(app.getApplicationId());
- appContext.setApplicationName(appName);
- appContext.setQueue(queue);
-
- // Set up the container launch context for the application master
- ContainerLaunchContext amContainer = Records
- .newRecord(ContainerLaunchContext.class);
- Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
-
- // set local resources for the application master
- // local files or archives as needed
- // In this scenario, the jar file for the application master is part of the
- // local resources
- LOG.info("Copy App Master jar from local filesystem and add to local environment");
- // Copy the application master jar to the filesystem
- // Create a local resource to point to the destination jar path
- String appMasterJar = findContainingJar(MasterServer.class);
- FileSystem fs = FileSystem.get(_hadoopConf);
- Path src = new Path(appMasterJar);
- String appHome = Util.getApplicationHomeForId(_appId.toString());
- Path dst = new Path(fs.getHomeDirectory(),
- appHome + Path.SEPARATOR + "AppMaster.jar");
- fs.copyFromLocalFile(false, true, src, dst);
- localResources.put("AppMaster.jar", Util.newYarnAppResource(fs, dst));
-
- Version jstormVersion = Util.getJStormVersion();
- Path zip;
- if (storm_zip_location != null) {
- zip = new Path(storm_zip_location);
- } else {
- zip = new Path("/lib/jstorm/"+jstormVersion+"/jstorm.zip");
- }
- _jstormConf.put("jstorm.zip.path", zip.makeQualified(fs).toUri().getPath());
- LocalResourceVisibility visibility = LocalResourceVisibility.PUBLIC;
- _jstormConf.put("jstorm.zip.visibility", "PUBLIC");
- if (!Util.isPublic(fs, zip)) {
- visibility = LocalResourceVisibility.APPLICATION;
- _jstormConf.put("jstorm.zip.visibility", "APPLICATION");
- }
- localResources.put("jstorm", Util.newYarnAppResource(fs, zip, LocalResourceType.ARCHIVE, visibility));
-
- Path confDst = Util.createConfigurationFileInFs(fs, appHome, _jstormConf, _hadoopConf);
- // establish a symbolic link to conf directory
- localResources.put("conf", Util.newYarnAppResource(fs, confDst));
-
- LOG.info("appHome is: " + appHome);
- // Setup security tokens
- Path[] paths = new Path[3];
- paths[0] = dst;
- paths[1] = zip;
- paths[2] = confDst;
- Credentials credentials = new Credentials();
- TokenCache.obtainTokensForNamenodes(credentials, paths, _hadoopConf);
- DataOutputBuffer dob = new DataOutputBuffer();
- credentials.writeTokenStorageToStream(dob);
- ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
-
- //security tokens for HDFS distributed cache
- amContainer.setTokens(securityTokens);
-
- // Set local resource info into app master container launch context
- amContainer.setLocalResources(localResources);
-
- // Set the env variables to be setup in the env where the application master
- // will be run
- LOG.info("Set the environment for the application master");
- Map<String, String> env = new HashMap<String, String>();
- // add the runtime classpath needed for tests to work
- Apps.addToEnvironment(env, Environment.CLASSPATH.name(), "./conf");
- Apps.addToEnvironment(env, Environment.CLASSPATH.name(), "./AppMaster.jar");
-
- //Make sure that AppMaster has access to all YARN JARs
- List<String> yarn_classpath_cmd = java.util.Arrays.asList("yarn", "classpath");
- ProcessBuilder pb = new ProcessBuilder(yarn_classpath_cmd);
- LOG.info("YARN CLASSPATH COMMAND = [" + yarn_classpath_cmd + "]");
- pb.environment().putAll(System.getenv());
- Process proc = pb.start();
- Util.redirectStreamAsync(proc.getErrorStream(), System.err);
- BufferedReader reader = new BufferedReader(new InputStreamReader(proc.getInputStream(), "UTF-8"));
- String line = "";
- String yarn_class_path = (String) _jstormConf.get("jstorm.yarn.yarn_classpath");
- if (yarn_class_path == null){
- StringBuilder yarn_class_path_builder = new StringBuilder();
- while ((line = reader.readLine() ) != null){
- yarn_class_path_builder.append(line);
- }
- yarn_class_path = yarn_class_path_builder.toString();
- }
- LOG.info("YARN CLASSPATH = [" + yarn_class_path + "]");
- proc.waitFor();
- reader.close();
- Apps.addToEnvironment(env, Environment.CLASSPATH.name(), yarn_class_path);
-
- String stormHomeInZip = Util.getJStormHomeInZip(fs, zip, jstormVersion.version());
- Apps.addToEnvironment(env, Environment.CLASSPATH.name(), "./jstorm/" + stormHomeInZip + "/*");
- Apps.addToEnvironment(env, Environment.CLASSPATH.name(), "./jstorm/" + stormHomeInZip + "/lib/*");
-
- String java_home = (String) _jstormConf.get("jstorm.yarn.java_home");
- if (java_home == null)
- java_home = System.getenv("JAVA_HOME");
-
- if (java_home != null && !java_home.isEmpty())
- env.put("JAVA_HOME", java_home);
- LOG.info("Using JAVA_HOME = [" + env.get("JAVA_HOME") + "]");
-
- env.put("appJar", appMasterJar);
- env.put("appName", appName);
- env.put("appId", new Integer(_appId.getId()).toString());
- env.put("JSTORM_LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR);
- amContainer.setEnvironment(env);
- LOG.info("appMaster env:" + env);
-
- // Set the necessary command to execute the application master
- Vector<String> vargs = new Vector<String>();
- if (java_home != null && !java_home.isEmpty())
- vargs.add(env.get("JAVA_HOME") + "/bin/java");
- else
- vargs.add("java");
- vargs.add("-Djstorm.home=./jstorm/" + stormHomeInZip + "/");
- vargs.add("-Dlogfile.name=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/master.log");
- //vargs.add("-verbose:class");
- //for debug
-// vargs.add("-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000");
-
- vargs.add("com.alibaba.jstorm.yarn.MasterServer");
- vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout");
- vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stderr");
- // Set java executable command
- LOG.info("Setting up app master command:"+vargs);
-
- amContainer.setCommands(vargs);
-
- // Set up resource type requirements
- // For now, only memory and cpu are supported so we set memory & cpu requirements
- Resource capability = Records.newRecord(Resource.class);
- capability.setMemory(amMB);
- //TODO: get cpu cores from command
- capability.setVirtualCores(1);
- appContext.setResource(capability);
- appContext.setAMContainerSpec(amContainer);
-
- _yarn.submitApplication(appContext);
- }
-
- /**
- * Find a jar that contains a class of the same name, if any.
- * It will return a jar file, even if that is not the first thing
- * on the class path that has a class with the same name.
- *
- * @param my_class the class to find.
- * @return a jar file that contains the class, or null.
- * @throws IOException on any error
- */
- public static String findContainingJar(Class<?> my_class) throws IOException {
- ClassLoader loader = my_class.getClassLoader();
- String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
- for(Enumeration<URL> itr = loader.getResources(class_file);
- itr.hasMoreElements();) {
- URL url = itr.nextElement();
- if ("jar".equals(url.getProtocol())) {
- String toReturn = url.getPath();
- if (toReturn.startsWith("file:")) {
- toReturn = toReturn.substring("file:".length());
- }
- // URLDecoder is a misnamed class, since it actually decodes
- // x-www-form-urlencoded MIME type rather than actual
- // URL encoding (which the file path has). Therefore it would
- // decode +s to ' 's which is incorrect (spaces are actually
- // either unencoded or encoded as "%20"). Replace +s first, so
- // that they are kept sacred during the decoding process.
- toReturn = toReturn.replaceAll("\\+", "%2B");
- toReturn = URLDecoder.decode(toReturn, "UTF-8");
- return toReturn.replaceAll("!.*$", "");
- }
- }
-
- throw new IOException("Fail to locat a JAR for class: "+my_class.getName());
- }
-
- public ApplicationId getAppId() {
- return _appId;
- }
-
- public void stop() {
- if(_client != null) {
- _client.close();
- }
- _yarn.stop();
- }
-
- public Client getClient() throws YarnException, IOException {
- if (_client == null) {
- String host = null;
- int port = 0;
- //wait for application to be ready
- int max_wait_for_report = Utils.getInt(_jstormConf.get(Config.YARN_REPORT_WAIT_MILLIS));
- int waited=0;
- while (waited<max_wait_for_report) {
- ApplicationReport report = _yarn.getApplicationReport(_appId);
- host = report.getHost();
- port = report.getRpcPort();
- if (host == null || port==0) {
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
- }
- waited += 1000;
- } else {
- break;
- }
- }
- if (host == null || port==0) {
- LOG.info("No host/port returned for Application Master " + _appId);
- return null;
- }
-
- LOG.info("application report for "+_appId+" :"+host+":"+port);
- if (_jstormConf == null ) {
- _jstormConf = new HashMap<Object,Object>();
- }
- _jstormConf.put(Config.MASTER_HOST, host);
- _jstormConf.put(Config.MASTER_THRIFT_PORT, port);
- LOG.info("Attaching to "+host+":"+port+" to talk to app master "+_appId);
- try {
- _client = MasterClient.getConfiguredClient(_jstormConf);
- } catch (Exception e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- LOG.error(host, e.getStackTrace());
- }
- }
- return _client.getClient();
- }
-
- public static JStormOnYarn attachToApp(String appId, Map stormConf) {
- return new JStormOnYarn(ConverterUtils.toApplicationId(appId), stormConf);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/LaunchCommand.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/LaunchCommand.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/LaunchCommand.java
deleted file mode 100644
index 3b19720..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/LaunchCommand.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.PrintStream;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Options;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.yarn.Client.ClientCommand;
-import com.alibaba.jstorm.yarn.generated.StormMaster;
-
-public class LaunchCommand implements ClientCommand {
-
- private static final Logger LOG = LoggerFactory.getLogger(LaunchCommand.class);
-
- @Override
- public String getHeaderDescription() {
- return "jstorm-yarn launch <master.yaml>";
- }
-
- @Override
- public Options getOpts() {
- Options opts = new Options();
- opts.addOption("appname", true, "Application Name. Default value - JStorm-on-Yarn");
- opts.addOption("queue", true, "RM Queue in which this application is to be submitted");
- opts.addOption("jstormHome", true, "JStorm Home Directory");
- opts.addOption("output", true, "Output file");
- opts.addOption("jstormConfOutput", true, "storm.yaml file");
- opts.addOption("jstormZip", true, "file path of jstorm.zip");
- return opts;
- }
-
- @Override
- public void process(CommandLine cl) throws Exception {
-
- String config_file = null;
- List remaining_args = cl.getArgList();
- if (remaining_args!=null && !remaining_args.isEmpty()) {
- config_file = (String)remaining_args.get(0);
- }
-
- Map stormConf = Config.readJStormConfig();
-
- LOG.info("initial storm conf:" + stormConf);
-
- String appName = cl.getOptionValue("appname", "JStorm-on-Yarn");
- String queue = cl.getOptionValue("queue", "default");
-
- String storm_zip_location = cl.getOptionValue("jstormZip");
- Integer amSize = (Integer) stormConf.get(Config.MASTER_SIZE_MB);
-
- System.out.println("amSize: " + amSize);
-
- JStormOnYarn jstorm = null;
- try {
- jstorm = JStormOnYarn.launchApplication(appName,
- queue, amSize,
- stormConf,
- storm_zip_location);
- LOG.debug("Submitted application's ID:" + jstorm.getAppId());
-
- //download storm.yaml file
- String storm_yaml_output = cl.getOptionValue("jstormConfOutput");
- if (storm_yaml_output != null && storm_yaml_output.length() > 0) {
- //try to download storm.yaml
- StormMaster.Client client = jstorm.getClient();
- if (client != null)
- StormMasterCommand.downloadStormYaml(client, storm_yaml_output);
- else
- LOG.warn("No storm.yaml is downloaded");
- }
-
- //store appID to output
- String output = cl.getOptionValue("output");
- if (output == null)
- System.out.println(jstorm.getAppId());
- else {
- PrintStream os = new PrintStream(output);
- os.println(jstorm.getAppId());
- os.flush();
- os.close();
- }
- } finally {
- if (jstorm != null) {
- jstorm.stop();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterClient.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterClient.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterClient.java
deleted file mode 100644
index d37cf9c..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterClient.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.util.Map;
-
-import org.apache.thrift7.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.yarn.generated.StormMaster;
-import com.alibaba.jstorm.yarn.generated.StormMaster.Client;
-import com.alibaba.jstorm.yarn.thrift.ThriftClient;
-
-import backtype.storm.utils.Utils;
-
-public class MasterClient extends ThriftClient {
- private StormMaster.Client _client;
- private static final Logger LOG = LoggerFactory.getLogger(MasterClient.class);
-
- public MasterClient(Map storm_conf, String masterHost, int masterPort, Integer timeout) throws Exception {
- super(storm_conf, masterHost, masterPort, timeout);
- _client = new StormMaster.Client(_protocol);
- }
-
- public static MasterClient getConfiguredClient(Map conf) throws Exception {
-
- try {
- String masterHost = (String) conf.get(Config.MASTER_HOST);
-// String zookeeper = conf.get(Config.)
- LOG.info("masterHost is:" + masterHost);
-
- int masterPort = Utils.getInt(conf.get(Config.MASTER_THRIFT_PORT));
- LOG.info("masterPort is" + masterPort);
- System.out.println("masterPort is" + masterPort);
- try {
- Integer timeout = Utils.getInt(conf.get(Config.MASTER_TIMEOUT_SECS));
- return new MasterClient(conf, masterHost, masterPort, timeout);
- } catch (IllegalArgumentException e) {
- return new MasterClient(conf, masterHost, masterPort, null);
- }
-
- } catch (TTransportException ex) {
- throw new RuntimeException(ex);
- }
- }
-
- public Client getClient() {
- return _client;
- }
-
-}
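
A hypothetical snippet showing how the removed MasterClient would typically be obtained and released; it assumes a conf map that already resolves master.host and master.thrift.port (JStormOnYarn.getClient() above fills these in from the YARN application report before constructing the client):

    package com.alibaba.jstorm.yarn;

    import java.util.Map;
    import com.alibaba.jstorm.yarn.generated.StormMaster;

    public class MasterClientSketch {
        @SuppressWarnings("rawtypes")
        public static void main(String[] args) throws Exception {
            Map conf = Config.readJStormConfig();  // must resolve master.host / master.thrift.port
            MasterClient master = MasterClient.getConfiguredClient(conf);
            try {
                StormMaster.Client thrift = master.getClient();
                // StormMaster RPCs would be issued here, e.g. the storm.yaml download
                // performed by StormMasterCommand.downloadStormYaml later in this commit.
                System.out.println("connected: " + (thrift != null));
            } finally {
                master.close();  // inherited from ThriftClient
            }
        }
    }
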
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterServer.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterServer.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterServer.java
deleted file mode 100644
index e020e57..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/MasterServer.java
+++ /dev/null
@@ -1,255 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Options;
-import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.service.Service;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
-import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
-import org.apache.hadoop.yarn.api.records.AMCommand;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.thrift7.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-//import backtype.storm.security.auth.ThriftServer;
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.yarn.generated.StormMaster;
-import com.alibaba.jstorm.yarn.generated.StormMaster.Processor;
-import com.alibaba.jstorm.yarn.thrift.ThriftServer;
-
-//import org.apache.thrift7.TProcessor;
-
-public class MasterServer extends ThriftServer {
-
- private static final Logger LOG = LoggerFactory.getLogger(MasterServer.class);
- private static StormMasterServerHandler _handler;
-
- private Thread initAndStartHeartbeat(final StormAMRMClient client,
- final BlockingQueue<Container> launcherQueue,
- final int heartBeatIntervalMs) {
- Thread thread = new Thread() {
- @Override
- public void run() {
- try {
- while (client.getServiceState() == Service.STATE.STARTED &&
- !Thread.currentThread().isInterrupted()) {
-
- Thread.sleep(heartBeatIntervalMs);
-
- // We always send 50% progress.
- AllocateResponse allocResponse = client.allocate(0.5f);
-
- AMCommand am_command = allocResponse.getAMCommand();
- if (am_command!=null &&
- (am_command == AMCommand.AM_SHUTDOWN || am_command==AMCommand.AM_RESYNC)) {
- LOG.info("Got AM_SHUTDOWN or AM_RESYNC from the RM");
- _handler.stop();
- System.exit(0);
- }
-
- List<Container> allocatedContainers = allocResponse.getAllocatedContainers();
- if (allocatedContainers.size() > 0) {
- // Add newly allocated containers to the client.
- LOG.info("HB: Received allocated containers (" + allocatedContainers.size() + ")");
- client.addAllocatedContainers(allocatedContainers);
- if (client.supervisorsAreToRun()) {
- LOG.info("HB: Supervisors are to run, so queueing (" + allocatedContainers.size() + ") containers...");
- launcherQueue.addAll(allocatedContainers);
- } else {
- LOG.info("HB: Supervisors are to stop, so releasing all containers...");
- client.stopAllSupervisors();
- }
- }
-
- List<ContainerStatus> completedContainers =
- allocResponse.getCompletedContainersStatuses();
-
- if (completedContainers.size() > 0 && client.supervisorsAreToRun()) {
- LOG.debug("HB: Containers completed (" + completedContainers.size() + "), so releasing them.");
- client.startAllSupervisors();
- }
-
- }
- } catch (Throwable t) {
- // Something happened we could not handle. Make sure the AM goes
- // down so that we are not surprised later on that our heart
- // stopped..
- LOG.error("Unhandled error in AM: ", t);
- _handler.stop();
- System.exit(1);
- }
- }
- };
- thread.start();
- return thread;
- }
-
- @SuppressWarnings("unchecked")
- public static void main(String[] args) throws Exception {
- LOG.info("Starting the AM!!!!");
-
- Options opts = new Options();
- opts.addOption("app_attempt_id", true, "App Attempt ID. Not to be used " +
- "unless for testing purposes");
-
- CommandLine cl = new GnuParser().parse(opts, args);
-
- ApplicationAttemptId appAttemptID;
- Map<String, String> envs = System.getenv();
- if (cl.hasOption("app_attempt_id")) {
- String appIdStr = cl.getOptionValue("app_attempt_id", "");
- appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr);
- } else if (envs.containsKey(ApplicationConstants.Environment.CONTAINER_ID.name())) {
- ContainerId containerId = ConverterUtils.toContainerId(envs
- .get(ApplicationConstants.Environment.CONTAINER_ID.name()));
- appAttemptID = containerId.getApplicationAttemptId();
- LOG.info("appAttemptID from env:" + appAttemptID.toString());
- } else {
- LOG.error("appAttemptID is not specified for storm master");
- throw new Exception("appAttemptID is not specified for storm master");
- }
-
- @SuppressWarnings("rawtypes")
- Map storm_conf = Config.readJStormConfig(null);
- Util.rmNulls(storm_conf);
-
- YarnConfiguration hadoopConf = new YarnConfiguration();
-
- final String host = InetAddress.getLocalHost().getHostName();
- storm_conf.put("nimbus.host", host);
-
- StormAMRMClient rmClient =
- new StormAMRMClient(appAttemptID, storm_conf, hadoopConf);
- rmClient.init(hadoopConf);
- rmClient.start();
-
- BlockingQueue<Container> launcherQueue = new LinkedBlockingQueue<Container>();
-
- MasterServer server = new MasterServer(storm_conf, rmClient);
- try {
- final int port = Utils.getInt(storm_conf.get(Config.MASTER_THRIFT_PORT));
- final String target = host + ":" + port;
- InetSocketAddress addr = NetUtils.createSocketAddr(target);
- RegisterApplicationMasterResponse resp =
- rmClient.registerApplicationMaster(addr.getHostName(), port, null);
- LOG.info("Got a registration response "+resp);
- LOG.info("Max Capability "+resp.getMaximumResourceCapability());
- rmClient.setMaxResource(resp.getMaximumResourceCapability());
- LOG.info("Starting HB thread");
- server.initAndStartHeartbeat(rmClient, launcherQueue,
- (Integer) storm_conf
- .get(Config.MASTER_HEARTBEAT_INTERVAL_MILLIS));
- LOG.info("Starting launcher");
- initAndStartLauncher(rmClient, launcherQueue);
- rmClient.startAllSupervisors();
- LOG.info("Starting Master Thrift Server");
- server.serve();
- LOG.info("StormAMRMClient::unregisterApplicationMaster");
- rmClient.unregisterApplicationMaster(FinalApplicationStatus.SUCCEEDED,
- "AllDone", null);
- } finally {
- if (server.isServing()) {
- LOG.info("Stop Master Thrift Server");
- server.stop();
- }
- LOG.info("Stop RM client");
- rmClient.stop();
- }
- System.exit(0);
- }
-
- private static void initAndStartLauncher(final StormAMRMClient client,
- final BlockingQueue<Container> launcherQueue) {
- Thread thread = new Thread() {
- Container container;
- @Override
- public void run() {
- while (client.getServiceState() == Service.STATE.STARTED &&
- !Thread.currentThread().isInterrupted()) {
- try {
- container = launcherQueue.take();
- LOG.info("LAUNCHER: Taking container with id ("+container.getId()+") from the queue.");
- if (client.supervisorsAreToRun()) {
- LOG.info("LAUNCHER: Supervisors are to run, so launching container id ("+container.getId()+")");
- client.launchSupervisorOnContainer(container);
- } else {
- // Do nothing
- LOG.info("LAUNCHER: Supervisors are not to run, so not launching container id ("+container.getId()+")");
- }
- } catch (InterruptedException e) {
- if (client.getServiceState() == Service.STATE.STARTED) {
- LOG.error("Launcher thread interrupted : ", e);
- System.exit(1);
- }
- return;
- } catch (IOException e) {
- LOG.error("Launcher thread I/O exception : ", e);
- System.exit(1);
- }
- }
- }
- };
- thread.start();
- }
-
- public MasterServer(@SuppressWarnings("rawtypes") Map storm_conf,
- StormAMRMClient client) {
- this(storm_conf, new StormMasterServerHandler(storm_conf, client));
- }
-
- private MasterServer(@SuppressWarnings("rawtypes") Map storm_conf,
- StormMasterServerHandler handler) {
- super(storm_conf,
- new Processor<StormMaster.Iface>(handler),
- Utils.getInt(storm_conf.get(Config.MASTER_THRIFT_PORT)));
- try {
- _handler = handler;
- _handler.init(this);
-
- LOG.info("launch nimbus");
- _handler.startNimbus();
-
- LOG.info("launch ui");
- _handler.startUI();
-
- int numSupervisors =
- Utils.getInt(storm_conf.get(Config.MASTER_NUM_SUPERVISORS));
- LOG.info("launch " + numSupervisors + " supervisors");
- _handler.addSupervisors(numSupervisors);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void stop() {
- super.stop();
- if (_handler != null) {
- _handler.stop();
- _handler = null;
- }
- }
-
- public String getStormConf() throws TException {
- return _handler.getStormConf();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormAMRMClient.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormAMRMClient.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormAMRMClient.java
deleted file mode 100644
index 7dcc766..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormAMRMClient.java
+++ /dev/null
@@ -1,214 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.Container;
-import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
-import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
-import org.apache.hadoop.yarn.api.records.Priority;
-import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hadoop.yarn.client.api.impl.AMRMClientImpl;
-import org.apache.hadoop.yarn.client.api.impl.NMClientImpl;
-import org.apache.hadoop.yarn.client.api.AMRMClient.ContainerRequest;
-import org.apache.hadoop.yarn.client.api.NMClient;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.util.Records;
-import org.mortbay.log.Log;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.utils.Utils;
-
-public class StormAMRMClient extends AMRMClientImpl<ContainerRequest> {
-
- private static final Logger LOG = LoggerFactory.getLogger(StormAMRMClient.class);
-
- @SuppressWarnings("rawtypes")
- private final Map storm_conf;
- private final YarnConfiguration hadoopConf;
- private final Priority DEFAULT_PRIORITY = Records.newRecord(Priority.class);
- private final Set<Container> containers;
- private volatile boolean supervisorsAreToRun = false;
- private AtomicInteger numSupervisors;
- private Resource maxResourceCapability;
- private Resource defaultResource;
- private ApplicationAttemptId appAttemptId;
- private NMClientImpl nmClient;
-
- public StormAMRMClient(ApplicationAttemptId appAttemptID, @SuppressWarnings("rawtypes") Map storm_conf,
- YarnConfiguration hadoopConf) {
- this.appAttemptId = appAttemptID;
- this.storm_conf = storm_conf;
- this.hadoopConf = hadoopConf;
- Integer pri = Utils.getInt(storm_conf.get(Config.MASTER_CONTAINER_PRIORITY));
- this.DEFAULT_PRIORITY.setPriority(pri);
- this.containers = new TreeSet<Container>();
- numSupervisors = new AtomicInteger(0);
-
- // start am nm client
- nmClient = (NMClientImpl) NMClient.createNMClient();
- nmClient.init(hadoopConf);
- nmClient.start();
- }
-
- public void setMaxResource(Resource maximumResourceCapability) {
- this.maxResourceCapability = maximumResourceCapability;
- this.defaultResource = maximumResourceCapability;
- this.defaultResource.setMemory(1024);
- this.defaultResource.setVirtualCores(1);
- LOG.info("Max Capability is now "+this.maxResourceCapability);
- }
-
- public synchronized void startAllSupervisors() {
- LOG.debug("Starting all supervisors, requesting containers...");
- this.supervisorsAreToRun = true;
- this.addSupervisorsRequest();
- }
-
- private void addSupervisorsRequest() {
- int num = numSupervisors.getAndSet(0);
- for (int i=0; i<num; i++) {
-// ContainerRequest req = new ContainerRequest(this.maxResourceCapability,
- ContainerRequest req = new ContainerRequest(this.defaultResource,
- null, // String[] nodes,
- null, // String[] racks,
- DEFAULT_PRIORITY);
- super.addContainerRequest(req);
- }
- }
-
- public synchronized boolean addAllocatedContainers(List<Container> containers) {
- for (int i=0; i<containers.size(); i++) {
-// ContainerRequest req = new ContainerRequest(this.maxResourceCapability,
- ContainerRequest req = new ContainerRequest(this.defaultResource,
- null, // String[] nodes,
- null, // String[] racks,
- DEFAULT_PRIORITY);
- super.removeContainerRequest(req);
- }
- return this.containers.addAll(containers);
-
- }
-
- public boolean supervisorsAreToRun() {
- return this.supervisorsAreToRun;
- }
-
- public void stopAllSupervisors() {
- LOG.debug("Stopping all supervisors, releasing all containers...");
- this.supervisorsAreToRun = false;
- releaseAllSupervisorsRequest();
- }
-
- private void releaseAllSupervisorsRequest() {
- Iterator<Container> it = this.containers.iterator();
- ContainerId id;
- while (it.hasNext()) {
- id = it.next().getId();
- LOG.debug("Releasing container (id:"+id+")");
- releaseAssignedContainer(id);
- it.remove();
- }
- }
-
- public void launchSupervisorOnContainer(Container container)
- throws IOException{
- // create a container launch context
- ContainerLaunchContext launchContext = Records.newRecord(ContainerLaunchContext.class);
- UserGroupInformation user = UserGroupInformation.getCurrentUser();
- try {
- Credentials credentials = user.getCredentials();
- DataOutputBuffer dob = new DataOutputBuffer();
- credentials.writeTokenStorageToStream(dob);
- ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
- launchContext.setTokens(securityTokens);
- } catch (IOException e) {
- LOG.warn("Getting current user info failed when trying to launch the container"
- + e.getMessage());
- }
-
- // CLC: env
- Map<String, String> env = new HashMap<String, String>();
- env.put("JSTORM_LOG_DIR", ApplicationConstants.LOG_DIR_EXPANSION_VAR);
- launchContext.setEnvironment(env);
- LOG.info("container launch env:" + env);
-
- // CLC: local resources includes storm, conf
- Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
- String jstorm_zip_path = (String) storm_conf.get("jstorm.zip.path");
- Path zip = new Path(jstorm_zip_path);
- FileSystem fs = FileSystem.get(hadoopConf);
- String vis = (String) storm_conf.get("jstorm.zip.visibility");
- if (vis.equals("PUBLIC"))
- localResources.put("jstorm", Util.newYarnAppResource(fs, zip,
- LocalResourceType.ARCHIVE, LocalResourceVisibility.PUBLIC));
- else if (vis.equals("PRIVATE"))
- localResources.put("jstorm", Util.newYarnAppResource(fs, zip,
- LocalResourceType.ARCHIVE, LocalResourceVisibility.PRIVATE));
- else if (vis.equals("APPLICATION"))
- localResources.put("jstorm", Util.newYarnAppResource(fs, zip,
- LocalResourceType.ARCHIVE, LocalResourceVisibility.APPLICATION));
-
- LOG.info("local resources:" + localResources);
- LOG.info("appAttemptId:" + appAttemptId);
- String appHome = Util.getApplicationHomeForId(appAttemptId.toString());
-
- String containerHome = appHome + Path.SEPARATOR + container.getId().getId();
-
- Path confDst = Util.createConfigurationFileInFs(fs, containerHome,
- this.storm_conf, this.hadoopConf);
-
- localResources.put("conf", Util.newYarnAppResource(fs, confDst));
-
- launchContext.setLocalResources(localResources);
-
- // CLC: command
- List<String> supervisorArgs = Util.buildSupervisorCommands(this.storm_conf);
- LOG.info("start supervisor command: " + supervisorArgs);
- launchContext.setCommands(supervisorArgs);
-
- try {
- LOG.info("Use NMClient to launch supervisors in container. ");
- nmClient.startContainer(container, launchContext);
-
- String userShortName = user.getShortUserName();
- if (userShortName != null)
- LOG.info("Supervisor log: http://" + container.getNodeHttpAddress() + "/node/containerlogs/"
- + container.getId().toString() + "/" + userShortName + "/supervisor.log");
- } catch (Exception e) {
- LOG.error("Caught an exception while trying to start a container", e);
- System.exit(-1);
- }
-
- }
-
- public void addSupervisors(int number) {
- int num = numSupervisors.addAndGet(number);
- if (this.supervisorsAreToRun) {
- LOG.info("Added " + num + " supervisors, and requesting containers...");
- addSupervisorsRequest();
- } else {
- LOG.info("Added " + num + " supervisors, but not requesting containers now.");
- }
-
- }
-
-}
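
The removed StormAMRMClient splits supervisor management into two steps: addSupervisors(n) only bumps a pending counter, while the queued ContainerRequests are actually submitted once startAllSupervisors() sets supervisorsAreToRun. A rough usage sketch from the surrounding application master (variable names and lifecycle calls here are assumptions, not taken from this commit):

    // Hypothetical caller; appAttemptId, stormConf and maxCapability come from the AM context,
    // and the usual AMRMClient init()/start() registration is omitted.
    StormAMRMClient client = new StormAMRMClient(appAttemptId, stormConf, new YarnConfiguration());
    client.setMaxResource(maxCapability);  // also seeds the 1024 MB / 1 vcore default container size
    client.addSupervisors(3);              // queues three container requests
    client.startAllSupervisors();          // flips the flag and submits the queued requests

Note that setMaxResource points both maxResourceCapability and defaultResource at the same Resource object before shrinking it to 1024 MB / 1 vcore; a defensive copy (e.g. Resource.newInstance) would keep the recorded maximum intact.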
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterCommand.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterCommand.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterCommand.java
deleted file mode 100644
index cadd09b..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterCommand.java
+++ /dev/null
@@ -1,191 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.FileWriter;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Options;
-import org.apache.thrift7.TException;
-import org.apache.thrift7.transport.TTransportException;
-import org.json.simple.JSONValue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
-
-import com.alibaba.jstorm.yarn.Client.ClientCommand;
-import com.alibaba.jstorm.yarn.generated.StormMaster;
-import com.alibaba.jstorm.yarn.generated.StormMaster.Client;
-
-public class StormMasterCommand implements ClientCommand {
-
- private static final Logger LOG = LoggerFactory.getLogger(StormMasterCommand.class);
- enum COMMAND {
- GET_STORM_CONFIG,
- SET_STORM_CONFIG,
- START_NIMBUS,
- STOP_NIMBUS,
- START_UI,
- STOP_UI,
- ADD_SUPERVISORS,
- START_SUPERVISORS,
- STOP_SUPERVISORS,
- SHUTDOWN
- };
- COMMAND cmd;
-
- StormMasterCommand(COMMAND cmd) {
- this.cmd = cmd;
- }
- public static void downloadStormYaml(Client client, String storm_yaml_output) {
- String conf_str = "Not Avaialble";
-
- //fetch storm.yaml from Master
- try {
- conf_str = client.getStormConf();
- } catch (TTransportException ex) {
- LOG.error("Exception in downloading storm.yaml", ex);
- } catch (TException ex) {
- LOG.error("Exception in downloading storm.yaml", ex);
- }
-
- //store the fetched storm.yaml into storm_yaml_output or stdout
- try {
- Object json = JSONValue.parse(conf_str);
- Map<?, ?> conf = (Map<?, ?>)json;
- Yaml yaml = new Yaml();
-
- if (storm_yaml_output == null) {
- LOG.info("storm.yaml downloaded:");
- System.out.println(yaml.dump(conf));
- } else {
- FileWriter out = new FileWriter(storm_yaml_output);
- yaml.dump(conf, out);
- out.flush();
- out.close();
- LOG.info("storm.yaml downloaded into "+storm_yaml_output);
- }
- } catch (Exception ex) {
- LOG.error("Exception in storing storm.yaml. ", ex);
- }
- }
- @Override
- public Options getOpts() {
- Options opts = new Options();
- //TODO can we make this required
- opts.addOption("appId", true, "(Required) The storm clusters app ID");
-
- opts.addOption("output", true, "Output file");
- opts.addOption("supervisors", true, "(Required for addSupervisors) The # of supervisors to be added");
- return opts;
- }
- @Override
- public String getHeaderDescription() {
- // TODO Auto-generated method stub
- return null;
- }
- @Override
- public void process(CommandLine cl) throws Exception {
- String config_file = null;
- List remaining_args = cl.getArgList();
- if (remaining_args!=null && !remaining_args.isEmpty()) {
- config_file = (String)remaining_args.get(0);
- }
- Map stormConf = Config.readJStormConfig(null);
-
- String appId = cl.getOptionValue("appId");
- if(appId == null) {
- throw new IllegalArgumentException("-appId is required");
- }
-
- JStormOnYarn storm = null;
- try {
- storm = JStormOnYarn.attachToApp(appId, stormConf);
- StormMaster.Client client = storm.getClient();
- switch (cmd) {
- case GET_STORM_CONFIG:
- downloadStormYaml(client, cl.getOptionValue("output"));
- break;
-
- case SET_STORM_CONFIG:
- String storm_conf_str = JSONValue.toJSONString(stormConf);
- try {
- client.setStormConf(storm_conf_str);
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case ADD_SUPERVISORS:
- String supervisors = cl.getOptionValue("supervisors", "1");
- try {
- client.addSupervisors(Integer.parseInt(supervisors));
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case START_NIMBUS:
- try {
- client.startNimbus();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case STOP_NIMBUS:
- try {
- client.stopNimbus();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case START_UI:
- try {
- client.startUI();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case STOP_UI:
- try {
- client.stopUI();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case START_SUPERVISORS:
- try {
- client.startSupervisors();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case STOP_SUPERVISORS:
- try {
- client.stopSupervisors();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
-
- case SHUTDOWN:
- try {
- client.shutdown();
- } catch (TTransportException ex) {
- LOG.info(ex.toString());
- }
- break;
- }
- } finally {
- if (storm != null) {
- storm.stop();
- }
- }
- }
-
-}
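
Each enum value maps one CLI sub-command onto a single Thrift call against the running master, using -appId to attach to the right YARN application first. Assuming the outer Client class dispatches by command name, the ADD_SUPERVISORS path would be exercised roughly as follows (argument values are placeholders, and the real command-line wiring lives in Client, outside this diff):

    StormMasterCommand command = new StormMasterCommand(StormMasterCommand.COMMAND.ADD_SUPERVISORS);
    String[] args = {"-appId", "application_0000000000000_0001", "-supervisors", "2"};
    CommandLine cl = new GnuParser().parse(command.getOpts(), args);
    command.process(cl);   // attaches to the app and calls client.addSupervisors(2)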
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterServerHandler.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterServerHandler.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterServerHandler.java
deleted file mode 100644
index 74f457f..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/StormMasterServerHandler.java
+++ /dev/null
@@ -1,220 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.UnknownHostException;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.thrift7.TException;
-import org.json.simple.JSONValue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.Config;
-
-import com.alibaba.jstorm.yarn.generated.*;
-import com.google.common.base.Joiner;
-
-public class StormMasterServerHandler implements StormMaster.Iface {
-
- private static final Logger LOG = LoggerFactory.getLogger(StormMasterServerHandler.class);
- @SuppressWarnings("rawtypes")
- Map _storm_conf;
- StormAMRMClient _client;
- MasterServer _masterServer;
-
- public StormMasterServerHandler(@SuppressWarnings("rawtypes") Map storm_conf, StormAMRMClient client) {
- _storm_conf = storm_conf;
- setStormHostConf();
- Util.rmNulls(_storm_conf);
- _client = client;
- }
-
- @SuppressWarnings("unchecked")
- private void setStormHostConf() {
- try {
- String host_addr = InetAddress.getLocalHost().getHostAddress();
- LOG.info("Storm master host:"+host_addr);
- _storm_conf.put(Config.NIMBUS_HOST, host_addr);
- } catch (UnknownHostException ex) {
- LOG.warn("Failed to get IP address of local host");
- }
- }
-
- @Override
- public String getStormConf() throws TException {
- LOG.info("getting configuration...");
- return JSONValue.toJSONString(_storm_conf);
- }
-
- @Override
- public void setStormConf(String storm_conf) throws TException {
- LOG.info("setting configuration...");
-
- // stop processes
- stopSupervisors();
- stopUI();
- stopNimbus();
-
- Object json = JSONValue.parse(storm_conf);
- Map<?, ?> new_conf = (Map<?, ?>)json;
- _storm_conf.putAll(new_conf);
- Util.rmNulls(_storm_conf);
- setStormHostConf();
-
- // start processes
- startNimbus();
- startUI();
- startSupervisors();
- }
-
- @Override
- public void addSupervisors(int number) throws TException {
- LOG.info("adding "+number+" supervisors...");
- _client.addSupervisors(number);
- }
-
-
- class JStormProcess extends Thread {
- Process _process;
- String _name;
-
- public JStormProcess(String name){
- _name = name;
- }
-
- public void run(){
- startJStormProcess();
- try {
- _process.waitFor();
- LOG.info("Storm process "+_name+" stopped");
- } catch (InterruptedException e) {
- LOG.info("Interrupted => will stop the storm process too");
- _process.destroy();
- }
- }
-
- private void startJStormProcess() {
- try {
- LOG.info("Running: " + Joiner.on(" ").join(buildCommands()));
- LOG.info("current working dir:" + System.getProperty("user.dir"));
- ProcessBuilder builder =
- new ProcessBuilder(buildCommands());
-
- _process = builder.start();
-// Util.redirectStreamAsync(_process.getInputStream(), System.out);
-// Util.redirectStreamAsync(_process.getErrorStream(), System.err);
-
- } catch (IOException e) {
- LOG.warn("Error starting nimbus process ", e);
- }
- }
-
- private List<String> buildCommands() throws IOException {
- if (_name == "nimbus") {
- return Util.buildNimbusCommands(_storm_conf);
- } else if (_name == "ui") {
- return Util.buildUICommands(_storm_conf);
- }
-
- throw new IllegalArgumentException(
- "Cannot build command list for \"" + _name + "\"");
- }
-
- public void stopJStormProcess() {
- _process.destroy();
- }
- }
-
- JStormProcess nimbusProcess;
- JStormProcess uiProcess;
-
- @Override
- public void startNimbus() throws TException {
- LOG.info("starting nimbus...");
- synchronized(this) {
- if (nimbusProcess!=null && nimbusProcess.isAlive()){
- LOG.info("Received a request to start nimbus, but it is running now");
- return;
- }
- nimbusProcess = new JStormProcess("nimbus");
- nimbusProcess.start();
- }
- }
-
- @Override
- public void stopNimbus() throws TException {
- synchronized(this) {
- if (nimbusProcess == null) return;
- LOG.info("stopping nimbus...");
- if (!nimbusProcess.isAlive()){
- LOG.info("Received a request to stop nimbus, but it is not running now");
- return;
- }
- nimbusProcess.stopJStormProcess();
- nimbusProcess = null;
- }
- }
-
- @Override
- public void startUI() throws TException {
- LOG.info("starting UI...");
- synchronized(this) {
- if (uiProcess!=null && uiProcess.isAlive()){
- LOG.info("Received a request to start UI, but it is running now");
- return;
- }
- uiProcess = new JStormProcess("ui");
- uiProcess.start();
- }
- }
-
- @Override
- public void stopUI() throws TException {
- synchronized(this) {
- if (uiProcess == null) return;
- LOG.info("stopping UI...");
- if (!uiProcess.isAlive()){
- LOG.info("Received a request to stop UI, but it is not running now");
- return;
- }
- uiProcess.stopJStormProcess();
- uiProcess = null;
- }
- }
-
- @Override
- public void startSupervisors() throws TException {
- LOG.info("starting supervisors...");
- _client.startAllSupervisors();
- }
-
- @Override
- public void stopSupervisors() throws TException {
- LOG.info("stopping supervisors...");
- _client.stopAllSupervisors();
- }
-
- @Override
- public void shutdown() throws TException {
- LOG.info("shutdown storm master...");
- _masterServer.stop();
- }
-
- public void init(MasterServer masterServer) {
- _masterServer = masterServer;
- }
-
- public void stop() {
- try {
- stopSupervisors();
- stopUI();
- stopNimbus();
- } catch (TException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- }
-
-}
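
The handler runs nimbus and the UI as child processes wrapped in JStormProcess threads, and setStormConf amounts to a full restart: stop supervisors/UI/nimbus, merge the supplied JSON into the in-memory map, re-derive the nimbus host, then start everything again. A hedged example of driving it from the SET_STORM_CONFIG command above:

    // Hypothetical call; the JSON mirrors what JSONValue.toJSONString(stormConf) produces client-side.
    StormMasterServerHandler handler = new StormMasterServerHandler(stormConf, amrmClient);
    handler.init(masterServer);
    handler.setStormConf("{\"nimbus.childopts\":\"-Xmx1024m\",\"supervisor.slots.ports\":[6800,6801]}");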
[58/60] [abbrv] storm git commit: remove jstorm-utility directory
Posted by pt...@apache.org.
remove jstorm-utility directory
Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/e8f64d5e
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/e8f64d5e
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/e8f64d5e
Branch: refs/heads/jstorm-import
Commit: e8f64d5e88a8f5c29c1633104d06de0970d57676
Parents: e1f6844
Author: P. Taylor Goetz <pt...@gmail.com>
Authored: Thu Nov 5 15:23:13 2015 -0500
Committer: P. Taylor Goetz <pt...@gmail.com>
Committed: Thu Nov 5 15:23:13 2015 -0500
----------------------------------------------------------------------
.gitmodules | 3 -
jstorm-utility/jstorm-kafka/.gitignore | 1 -
jstorm-utility/jstorm-kafka/README.md | 0
jstorm-utility/jstorm-kafka/pom.xml | 146 --------
.../java/com/alibaba/jstorm/kafka/Host.java | 57 ----
.../com/alibaba/jstorm/kafka/KafkaConsumer.java | 241 -------------
.../alibaba/jstorm/kafka/KafkaMessageId.java | 27 --
.../com/alibaba/jstorm/kafka/KafkaSpout.java | 124 -------
.../alibaba/jstorm/kafka/KafkaSpoutConfig.java | 130 -------
.../alibaba/jstorm/kafka/PartitionConsumer.java | 227 -------------
.../jstorm/kafka/PartitionCoordinator.java | 49 ---
.../java/com/alibaba/jstorm/kafka/ZkState.java | 95 ------
.../alibaba/jstorm/test/kafka/KafkaTest.java | 57 ----
jstorm-utility/jstorm-rocket-mq/pom.xml | 94 -----
.../alibaba/aloha/meta/MetaClientConfig.java | 263 --------------
.../alibaba/aloha/meta/MetaConsumerFactory.java | 109 ------
.../java/com/alibaba/aloha/meta/MetaSpout.java | 248 --------------
.../java/com/alibaba/aloha/meta/MetaTuple.java | 90 -----
.../aloha/meta/example/TestTopology.java | 150 --------
.../alibaba/aloha/meta/example/WriterBolt.java | 59 ----
.../target/classes/META-INF/MANIFEST.MF | 5 -
.../com.alibaba.jstorm/metaspout/pom.properties | 7 -
.../maven/com.alibaba.jstorm/metaspout/pom.xml | 94 -----
.../test/main/resources/metaspout.yaml | 32 --
jstorm-utility/ons/conf/ons.yaml | 49 ---
jstorm-utility/ons/pom.xml | 101 ------
.../java/com/alibaba/jstorm/LoadConfig.java | 67 ----
.../java/com/alibaba/jstorm/TestTopology.java | 80 -----
.../java/com/alibaba/jstorm/ons/OnsConfig.java | 69 ----
.../java/com/alibaba/jstorm/ons/OnsTuple.java | 80 -----
.../jstorm/ons/consumer/ConsumerConfig.java | 65 ----
.../jstorm/ons/consumer/ConsumerFactory.java | 49 ---
.../jstorm/ons/consumer/ConsumerSpout.java | 268 ---------------
.../jstorm/ons/producer/ProducerBolt.java | 94 -----
.../jstorm/ons/producer/ProducerConfig.java | 29 --
.../jstorm/ons/producer/ProducerFactory.java | 59 ----
.../ons/test/main/resources/metaspout.yaml | 32 --
jstorm-utility/rocket-mq | 1 -
jstorm-utility/topology-monitor/.gitignore | 13 -
jstorm-utility/topology-monitor/README.md | 2 -
jstorm-utility/topology-monitor/pom.xml | 110 ------
.../cosmos/BlackholeBlockingQueueSpout.java | 114 -------
.../com/dianping/cosmos/BlackholeSpout.java | 101 ------
.../com/dianping/cosmos/MessageFetcher.java | 50 ---
.../java/com/dianping/cosmos/PumaSpout.java | 194 -----------
.../java/com/dianping/cosmos/RedisSinkBolt.java | 167 ---------
.../main/java/com/dianping/cosmos/Updater.java | 9 -
.../cosmos/metric/CatMetricsConsumer.java | 70 ----
.../dianping/cosmos/monitor/HttpCatClient.java | 57 ----
.../cosmos/monitor/HttpClientService.java | 120 -------
.../dianping/cosmos/monitor/SpoutCounter.java | 24 --
.../cosmos/monitor/TopologyMonitor.java | 90 -----
.../monitor/topology/ClusterInfoBolt.java | 170 ----------
.../monitor/topology/ClusterInfoTopology.java | 18 -
.../com/dianping/cosmos/util/CatClient.java | 19 --
.../com/dianping/cosmos/util/CatMetricUtil.java | 45 ---
.../com/dianping/cosmos/util/Constants.java | 9 -
.../java/com/dianping/cosmos/util/JSONUtil.java | 125 -------
.../com/dianping/cosmos/util/TupleHelpers.java | 33 --
.../transaction_meta_spout/conf/topology.yaml | 21 --
jstorm-utility/transaction_meta_spout/pom.xml | 68 ----
.../batch/example/BatchMetaRebalance.java | 108 ------
.../jstorm/batch/example/BatchMetaSpout.java | 131 -------
.../jstorm/batch/example/BatchMetaTopology.java | 163 ---------
.../alibaba/jstorm/batch/example/CountBolt.java | 84 -----
.../alibaba/jstorm/batch/example/DBBolt.java | 261 --------------
.../jstorm/batch/example/TransformBolt.java | 63 ----
.../jstorm/batch/meta/MetaSimpleClient.java | 340 -------------------
.../jstorm/batch/meta/MetaSpoutConfig.java | 119 -------
.../src/main/resources/metaspout.default.prop | 15 -
70 files changed, 6264 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/.gitmodules
----------------------------------------------------------------------
diff --git a/.gitmodules b/.gitmodules
index c68bb15..e69de29 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,3 +0,0 @@
-[submodule "jstorm-utility/rocket-mq"]
- path = jstorm-utility/rocket-mq
- url = https://github.com/rocketmq/rocketmq-storm
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/.gitignore
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/.gitignore b/jstorm-utility/jstorm-kafka/.gitignore
deleted file mode 100644
index b83d222..0000000
--- a/jstorm-utility/jstorm-kafka/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-/target/
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/README.md
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/README.md b/jstorm-utility/jstorm-kafka/README.md
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/pom.xml b/jstorm-utility/jstorm-kafka/pom.xml
deleted file mode 100755
index df7c0ea..0000000
--- a/jstorm-utility/jstorm-kafka/pom.xml
+++ /dev/null
@@ -1,146 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
- <parent>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-all</artifactId>
- <version>0.9.6.2</version>
- <relativePath>../..</relativePath>
- </parent>
-
- <modelVersion>4.0.0</modelVersion>
- <artifactId>jstorm-kafka</artifactId>
- <packaging>jar</packaging>
-
- <name>${project.artifactId}-${project.version}</name>
- <description>jstorm kafka</description>
-
- <url>http://maven.apache.org</url>
-
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <jstorm.version>${parent.version}</jstorm.version>
- <kafka.version>0.8.1</kafka.version>
- <curator.version>1.3.2</curator.version>
- </properties>
-
- <dependencies>
-
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
-
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-server</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>org.apache.kafka</groupId>
- <artifactId>kafka_2.9.2</artifactId>
- <version>${kafka.version}</version>
- <exclusions>
- <exclusion>
- <groupId>org.apache.zookeeper</groupId>
- <artifactId>zookeeper</artifactId>
- </exclusion>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>org.apache.zookeeper</groupId>
- <artifactId>zookeeper</artifactId>
- <version>3.4.5</version>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
- <dependency>
- <groupId>com.netflix.curator</groupId>
- <artifactId>curator-framework</artifactId>
- <version>${curator.version}</version>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
-
-
- <dependency>
- <groupId>com.netflix.curator</groupId>
- <artifactId>curator-recipes</artifactId>
- <version>${curator.version}</version>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- </exclusions>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.netflix.curator</groupId>
- <artifactId>curator-test</artifactId>
- <version>${curator.version}</version>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.testng</groupId>
- <artifactId>testng</artifactId>
- </exclusion>
- </exclusions>
- <scope>test</scope>
- </dependency>
-
-
- </dependencies>
-
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- <encoding>${project.build.sourceEncoding}</encoding>
- </configuration>
- </plugin>
- </plugins>
- </build>
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/Host.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/Host.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/Host.java
deleted file mode 100644
index 36227a0..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/Host.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-import java.io.Serializable;
-/**
- *
- * @author feilaoda
- *
- */
-public class Host implements Serializable {
- /**
- *
- */
- private static final long serialVersionUID = -315213440689707962L;
- private String host;
- private int port;
-
- public Host(String host) {
- this(host, 9092);
- }
-
- public Host(String host, int port) {
- this.host = host;
- this.port = port;
- }
-
- public String getHost() {
- return host;
- }
-
- public void setHost(String host) {
- this.host = host;
- }
-
- public int getPort() {
- return port;
- }
-
- public void setPort(int port) {
- this.port = port;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj) {
- return true;
- }
- if (obj == null || getClass() != obj.getClass()) {
- return false;
- }
- if (obj instanceof Host) {
- final Host other = (Host) obj;
- return this.host.equals(other.host) && this.port == other.port;
- } else {
- return false;
- }
- }
-}
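
Host overrides equals but not hashCode, so two equal hosts could land in different buckets of a HashMap or HashSet. If the class were kept, a matching hashCode along these lines would restore the contract (sketch only):

    @Override
    public int hashCode() {
        // consistent with the host/port based equals above
        return 31 * host.hashCode() + port;
    }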
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaConsumer.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaConsumer.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaConsumer.java
deleted file mode 100644
index 787b285..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaConsumer.java
+++ /dev/null
@@ -1,241 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-import java.io.IOException;
-import java.net.ConnectException;
-import java.net.SocketTimeoutException;
-import java.nio.channels.UnresolvedAddressException;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import kafka.api.FetchRequest;
-import kafka.api.FetchRequestBuilder;
-import kafka.api.PartitionOffsetRequestInfo;
-import kafka.cluster.Broker;
-import kafka.common.ErrorMapping;
-import kafka.common.KafkaException;
-import kafka.common.TopicAndPartition;
-import kafka.javaapi.FetchResponse;
-import kafka.javaapi.OffsetRequest;
-import kafka.javaapi.PartitionMetadata;
-import kafka.javaapi.TopicMetadata;
-import kafka.javaapi.TopicMetadataRequest;
-import kafka.javaapi.consumer.SimpleConsumer;
-import kafka.javaapi.message.ByteBufferMessageSet;
-
-/**
- *
- * @author feilaoda
- *
- */
-public class KafkaConsumer {
-
- private static Logger LOG = Logger.getLogger(KafkaConsumer.class);
-
- public static final int NO_OFFSET = -1;
-
- private int status;
- private SimpleConsumer consumer = null;
-
- private KafkaSpoutConfig config;
- private LinkedList<Host> brokerList;
- private int brokerIndex;
- private Broker leaderBroker;
-
- public KafkaConsumer(KafkaSpoutConfig config) {
- this.config = config;
- this.brokerList = new LinkedList<Host>(config.brokers);
- this.brokerIndex = 0;
- }
-
- public ByteBufferMessageSet fetchMessages(int partition, long offset) throws IOException {
-
- String topic = config.topic;
- FetchRequest req = new FetchRequestBuilder().clientId(config.clientId).addFetch(topic, partition, offset, config.fetchMaxBytes)
- .maxWait(config.fetchWaitMaxMs).build();
- FetchResponse fetchResponse = null;
- SimpleConsumer simpleConsumer = null;
- try {
- simpleConsumer = findLeaderConsumer(partition);
- if (simpleConsumer == null) {
- // LOG.error(message);
- return null;
- }
- fetchResponse = simpleConsumer.fetch(req);
- } catch (Exception e) {
- if (e instanceof ConnectException || e instanceof SocketTimeoutException || e instanceof IOException
- || e instanceof UnresolvedAddressException) {
- LOG.warn("Network error when fetching messages:", e);
- if (simpleConsumer != null) {
- String host = simpleConsumer.host();
- int port = simpleConsumer.port();
- simpleConsumer = null;
- throw new KafkaException("Network error when fetching messages: " + host + ":" + port + " , " + e.getMessage(), e);
- }
-
- } else {
- throw new RuntimeException(e);
- }
- }
- if (fetchResponse.hasError()) {
- short code = fetchResponse.errorCode(topic, partition);
- if (code == ErrorMapping.OffsetOutOfRangeCode() && config.resetOffsetIfOutOfRange) {
- long startOffset = getOffset(topic, partition, config.startOffsetTime);
- offset = startOffset;
- }
- if(leaderBroker != null) {
- LOG.error("fetch data from kafka topic[" + config.topic + "] host[" + leaderBroker.host() + ":" + leaderBroker.port() + "] partition["
- + partition + "] error:" + code);
- }else {
-
- }
- return null;
- } else {
- ByteBufferMessageSet msgs = fetchResponse.messageSet(topic, partition);
- return msgs;
- }
- }
-
- private SimpleConsumer findLeaderConsumer(int partition) {
- try {
- if (consumer != null) {
- return consumer;
- }
- PartitionMetadata metadata = findLeader(partition);
- if (metadata == null) {
- leaderBroker = null;
- consumer = null;
- return null;
- }
- leaderBroker = metadata.leader();
- consumer = new SimpleConsumer(leaderBroker.host(), leaderBroker.port(), config.socketTimeoutMs, config.socketReceiveBufferBytes,
- config.clientId);
-
- return consumer;
- } catch (Exception e) {
- LOG.error(e.getMessage(), e);
- }
- return null;
- }
-
- protected PartitionMetadata findLeader(int partition) {
- PartitionMetadata returnMetaData = null;
- int errors = 0;
- int size = brokerList.size();
-
- Host brokerHost = brokerList.get(brokerIndex);
- try {
- if (consumer == null) {
- consumer = new SimpleConsumer(brokerHost.getHost(), brokerHost.getPort(), config.socketTimeoutMs, config.socketReceiveBufferBytes,
- config.clientId);
- }
- } catch (Exception e) {
- LOG.warn(e.getMessage(), e);
- consumer = null;
- }
- int i = brokerIndex;
- loop: while (i < size && errors < size + 1) {
- Host host = brokerList.get(i);
- i = (i + 1) % size;
- brokerIndex = i; // next index
- try {
-
- if (consumer == null) {
- consumer = new SimpleConsumer(host.getHost(), host.getPort(), config.socketTimeoutMs, config.socketReceiveBufferBytes,
- config.clientId);
- }
- List<String> topics = Collections.singletonList(config.topic);
- TopicMetadataRequest req = new TopicMetadataRequest(topics);
- kafka.javaapi.TopicMetadataResponse resp = null;
- try {
- resp = consumer.send(req);
- } catch (Exception e) {
- errors += 1;
-
- LOG.error("findLeader error, broker:" + host.toString() + ", will change to next broker index:" + (i + 1) % size);
- if (consumer != null) {
- consumer.close();
- consumer = null;
- }
- continue;
- }
-
- List<TopicMetadata> metaData = resp.topicsMetadata();
- for (TopicMetadata item : metaData) {
- for (PartitionMetadata part : item.partitionsMetadata()) {
- if (part.partitionId() == partition) {
- returnMetaData = part;
- break loop;
- }
- }
- }
-
- } catch (Exception e) {
- LOG.error("Error communicating with Broker:" + host.toString() + ", find Leader for partition:" + partition);
- } finally {
- if (consumer != null) {
- consumer.close();
- consumer = null;
- }
- }
- }
-
- return returnMetaData;
- }
-
- public long getOffset(String topic, int partition, long startOffsetTime) {
- SimpleConsumer simpleConsumer = findLeaderConsumer(partition);
-
- if (simpleConsumer == null) {
- LOG.error("Error consumer is null get offset from partition:" + partition);
- return -1;
- }
-
- TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partition);
- Map<TopicAndPartition, PartitionOffsetRequestInfo> requestInfo = new HashMap<TopicAndPartition, PartitionOffsetRequestInfo>();
- requestInfo.put(topicAndPartition, new PartitionOffsetRequestInfo(startOffsetTime, 1));
- OffsetRequest request = new OffsetRequest(requestInfo, kafka.api.OffsetRequest.CurrentVersion(), simpleConsumer.clientId());
-
- long[] offsets = simpleConsumer.getOffsetsBefore(request).offsets(topic, partition);
- if (offsets.length > 0) {
- return offsets[0];
- } else {
- return NO_OFFSET;
- }
- }
-
- public void close() {
- if (consumer != null) {
- consumer.close();
- }
- }
-
- public SimpleConsumer getConsumer() {
- return consumer;
- }
-
- public void setConsumer(SimpleConsumer consumer) {
- this.consumer = consumer;
- }
-
- public int getStatus() {
- return status;
- }
-
- public void setStatus(int status) {
- this.status = status;
- }
-
- public Broker getLeaderBroker() {
- return leaderBroker;
- }
-
- public void setLeaderBroker(Broker leaderBroker) {
- this.leaderBroker = leaderBroker;
- }
-
-}
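
One detail worth noting in the removed consumer: when the broker reports OffsetOutOfRange and resetOffsetIfOutOfRange is set, fetchMessages only overwrites its local offset parameter and then returns null, so the corrected offset never reaches the caller (PartitionConsumer just logs the null result). If a transparent reset was the intent, a single retry with the valid offset would be needed, roughly (sketch, not part of the original commit):

    if (code == ErrorMapping.OffsetOutOfRangeCode() && config.resetOffsetIfOutOfRange) {
        long startOffset = getOffset(topic, partition, config.startOffsetTime);
        if (startOffset != NO_OFFSET) {
            // retry once from a valid offset instead of dropping the fetch
            return fetchMessages(partition, startOffset);
        }
    }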
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaMessageId.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaMessageId.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaMessageId.java
deleted file mode 100644
index a7fe8ca..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaMessageId.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-public class KafkaMessageId {
- private int partition;
- private long offset;
-
- public KafkaMessageId(int partition, long offset) {
- this.setPartition(partition);
- this.setOffset(offset);
- }
-
- public int getPartition() {
- return partition;
- }
-
- public void setPartition(int partition) {
- this.partition = partition;
- }
-
- public long getOffset() {
- return offset;
- }
-
- public void setOffset(long offset) {
- this.offset = offset;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpout.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpout.java
deleted file mode 100644
index 4fa11fa..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpout.java
+++ /dev/null
@@ -1,124 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-import java.util.Collection;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.kafka.PartitionConsumer.EmitState;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-
-public class KafkaSpout implements IRichSpout {
- /**
- *
- */
- private static final long serialVersionUID = 1L;
- private static Logger LOG = LoggerFactory.getLogger(KafkaSpout.class);
-
- protected SpoutOutputCollector collector;
-
- private long lastUpdateMs;
- PartitionCoordinator coordinator;
-
- private KafkaSpoutConfig config;
-
- private ZkState zkState;
-
- public KafkaSpout() {
- config = new KafkaSpoutConfig();
- }
-
- public KafkaSpout(KafkaSpoutConfig config) {
- this.config = config;
- }
-
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
- // TODO Auto-generated method stub
- this.collector = collector;
- config.configure(conf);
- zkState = new ZkState(conf, config);
- coordinator = new PartitionCoordinator(conf, config, context, zkState);
- lastUpdateMs = System.currentTimeMillis();
- }
-
- @Override
- public void close() {
- // TODO Auto-generated method stub
- zkState.close();
- }
-
- @Override
- public void activate() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void deactivate() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void nextTuple() {
- Collection<PartitionConsumer> partitionConsumers = coordinator.getPartitionConsumers();
- for(PartitionConsumer consumer: partitionConsumers) {
- EmitState state = consumer.emit(collector);
- LOG.debug("====== partition "+ consumer.getPartition() + " emit message state is "+state);
-// if(state != EmitState.EMIT_MORE) {
-// currentPartitionIndex = (currentPartitionIndex+1) % consumerSize;
-// }
-// if(state != EmitState.EMIT_NONE) {
-// break;
-// }
- }
- long now = System.currentTimeMillis();
- if((now - lastUpdateMs) > config.offsetUpdateIntervalMs) {
- commitState();
- }
-
-
- }
-
- public void commitState() {
- lastUpdateMs = System.currentTimeMillis();
- for(PartitionConsumer consumer: coordinator.getPartitionConsumers()) {
- consumer.commitState();
- }
-
- }
-
- @Override
- public void ack(Object msgId) {
- KafkaMessageId messageId = (KafkaMessageId)msgId;
- PartitionConsumer consumer = coordinator.getConsumer(messageId.getPartition());
- consumer.ack(messageId.getOffset());
- }
-
- @Override
- public void fail(Object msgId) {
- KafkaMessageId messageId = (KafkaMessageId)msgId;
- PartitionConsumer consumer = coordinator.getConsumer(messageId.getPartition());
- consumer.fail(messageId.getOffset());
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("bytes"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-
-
-}
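
The spout declares a single output field "bytes" and acks or fails per (partition, offset) via KafkaMessageId, checkpointing offsets to ZooKeeper every offsetUpdateIntervalMs. A typical (assumed) topology wiring sizes the spout parallelism to the partition count so PartitionCoordinator can stride partitions across tasks:

    // Hypothetical wiring; topic name, partition count and PrinterBolt are placeholders.
    Config conf = new Config();
    conf.put("kafka.topic", "jstorm");
    conf.put("kafka.broker.partitions", 4);
    conf.put("kafka.broker.hosts", "broker1:9092,broker2:9092");

    TopologyBuilder builder = new TopologyBuilder();
    builder.setSpout("kafka-spout", new KafkaSpout(), 4);   // one task per partition
    builder.setBolt("printer", new PrinterBolt(), 2).shuffleGrouping("kafka-spout");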
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpoutConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpoutConfig.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpoutConfig.java
deleted file mode 100644
index 86b6e69..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/KafkaSpoutConfig.java
+++ /dev/null
@@ -1,130 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-
-import com.alibaba.jstorm.utils.JStormUtils;
-
-import backtype.storm.spout.MultiScheme;
-import backtype.storm.spout.RawMultiScheme;
-
-
-public class KafkaSpoutConfig implements Serializable {
-
-
- private static final long serialVersionUID = 1L;
-
- public List<Host> brokers;
- public int numPartitions;
- public String topic;
- public String zkRoot;
-
- public List<Host> zkServers;
-
- public int fetchMaxBytes = 256*1024;
- public int fetchWaitMaxMs = 10000;
- public int socketTimeoutMs = 30 * 1000;
- public int socketReceiveBufferBytes = 64*1024;
- public long startOffsetTime = -1;
- public boolean fromBeginning = false;
- public String clientId;
- public boolean resetOffsetIfOutOfRange = false;
- public long offsetUpdateIntervalMs=2000;
- private Properties properties = null;
- private Map stormConf;
- public int batchSendCount = 1;
-
- public KafkaSpoutConfig() {
- }
-
- public KafkaSpoutConfig(Properties properties) {
- this.properties = properties;
- }
-
- public void configure(Map conf) {
- this.stormConf = conf;
- topic = getConfig("kafka.topic", "jstorm");
- zkRoot = getConfig("storm.zookeeper.root", "/jstorm");
-
- String zkHosts = getConfig("kafka.zookeeper.hosts", "127.0.0.1:2181");
- zkServers = convertHosts(zkHosts, 2181);
- String brokerHosts = getConfig("kafka.broker.hosts", "127.0.0.1:9092");
- brokers = convertHosts(brokerHosts, 9092);
-
- numPartitions = JStormUtils.parseInt(getConfig("kafka.broker.partitions"), 1);
- fetchMaxBytes = JStormUtils.parseInt(getConfig("kafka.fetch.max.bytes"), 256*1024);
- fetchWaitMaxMs = JStormUtils.parseInt(getConfig("kafka.fetch.wait.max.ms"), 10000);
- socketTimeoutMs = JStormUtils.parseInt(getConfig("kafka.socket.timeout.ms"), 30 * 1000);
- socketReceiveBufferBytes = JStormUtils.parseInt(getConfig("kafka.socket.receive.buffer.bytes"), 64*1024);
- fromBeginning = JStormUtils.parseBoolean(getConfig("kafka.fetch.from.beginning"), false);
- startOffsetTime = JStormUtils.parseInt(getConfig("kafka.start.offset.time"), -1);
- offsetUpdateIntervalMs = JStormUtils.parseInt(getConfig("kafka.offset.update.interval.ms"), 2000);
- clientId = getConfig("kafka.client.id", "jstorm");
- batchSendCount = JStormUtils.parseInt(getConfig("kafka.spout.batch.send.count"), 1);
- }
-
-
- private String getConfig(String key) {
- return getConfig(key, null);
- }
-
- private String getConfig(String key, String defaultValue) {
- if(properties!=null && properties.containsKey(key)) {
- return properties.getProperty(key);
- }else if(stormConf.containsKey(key)) {
- return String.valueOf(stormConf.get(key));
- }else {
- return defaultValue;
- }
- }
-
-
- public List<Host> convertHosts(String hosts, int defaultPort) {
- List<Host> hostList = new ArrayList<Host>();
- String[] hostArr = hosts.split(",");
- for (String s : hostArr) {
- Host host;
- String[] spec = s.split(":");
- if (spec.length == 1) {
- host = new Host(spec[0],defaultPort);
- } else if (spec.length == 2) {
- host = new Host(spec[0], JStormUtils.parseInt(spec[1]));
- } else {
- throw new IllegalArgumentException("Invalid host specification: " + s);
- }
- hostList.add(host);
- }
- return hostList;
- }
-
-
- public List<Host> getHosts() {
- return brokers;
- }
-
- public void setHosts(List<Host> hosts) {
- this.brokers = hosts;
- }
-
- public int getPartitionsPerBroker() {
- return numPartitions;
- }
-
- public void setPartitionsPerBroker(int partitionsPerBroker) {
- this.numPartitions = partitionsPerBroker;
- }
-
- public String getTopic() {
- return topic;
- }
-
- public void setTopic(String topic) {
- this.topic = topic;
- }
-
-
-}
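
convertHosts takes a comma-separated list of host[:port] entries and falls back to the supplied default port when a port is omitted, so the broker and zookeeper settings can be written in mixed form:

    // e.g. kafka.broker.hosts = "broker1,broker2:9093"
    List<Host> brokers = config.convertHosts("broker1,broker2:9093", 9092);
    // -> [broker1:9092 (default port applied), broker2:9093]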
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionConsumer.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionConsumer.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionConsumer.java
deleted file mode 100644
index 4b8ad7f..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionConsumer.java
+++ /dev/null
@@ -1,227 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.ImmutableMap;
-
-import kafka.javaapi.message.ByteBufferMessageSet;
-import kafka.message.Message;
-import kafka.message.MessageAndOffset;
-import backtype.storm.Config;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.utils.Utils;
-
-/**
- *
- * @author feilaoda
- *
- */
-public class PartitionConsumer {
- private static Logger LOG = LoggerFactory.getLogger(PartitionConsumer.class);
-
- static enum EmitState {
- EMIT_MORE, EMIT_END, EMIT_NONE
- }
-
- private int partition;
- private KafkaConsumer consumer;
-
-
- private PartitionCoordinator coordinator;
-
- private KafkaSpoutConfig config;
- private LinkedList<MessageAndOffset> emittingMessages = new LinkedList<MessageAndOffset>();
- private SortedSet<Long> pendingOffsets = new TreeSet<Long>();
- private SortedSet<Long> failedOffsets = new TreeSet<Long>();
- private long emittingOffset;
- private long lastCommittedOffset;
- private ZkState zkState;
- private Map stormConf;
-
- public PartitionConsumer(Map conf, KafkaSpoutConfig config, int partition, ZkState offsetState) {
- this.stormConf = conf;
- this.config = config;
- this.partition = partition;
- this.consumer = new KafkaConsumer(config);
- this.zkState = offsetState;
-
- Long jsonOffset = null;
- try {
- Map<Object, Object> json = offsetState.readJSON(zkPath());
- if (json != null) {
- // jsonTopologyId = (String)((Map<Object,Object>)json.get("topology"));
- jsonOffset = (Long) json.get("offset");
- }
- } catch (Throwable e) {
- LOG.warn("Error reading and/or parsing at ZkNode: " + zkPath(), e);
- }
-
- try {
- if (config.fromBeginning) {
- emittingOffset = consumer.getOffset(config.topic, partition, kafka.api.OffsetRequest.EarliestTime());
- } else {
- if (jsonOffset == null) {
- lastCommittedOffset = consumer.getOffset(config.topic, partition, kafka.api.OffsetRequest.LatestTime());
- } else {
- lastCommittedOffset = jsonOffset;
- }
- emittingOffset = lastCommittedOffset;
- }
- } catch (Exception e) {
- LOG.error(e.getMessage(), e);
- }
- }
-
- public EmitState emit(SpoutOutputCollector collector) {
- if (emittingMessages.isEmpty()) {
- fillMessages();
- }
-
- int count = 0;
- while (true) {
- MessageAndOffset toEmitMsg = emittingMessages.pollFirst();
- if (toEmitMsg == null) {
- return EmitState.EMIT_END;
- }
- count ++;
- Iterable<List<Object>> tups = generateTuples(toEmitMsg.message());
-
- if (tups != null) {
- for (List<Object> tuple : tups) {
- LOG.debug("emit message {}", new String(Utils.toByteArray(toEmitMsg.message().payload())));
- collector.emit(tuple, new KafkaMessageId(partition, toEmitMsg.offset()));
- }
- if(count>=config.batchSendCount) {
- break;
- }
- } else {
- ack(toEmitMsg.offset());
- }
- }
-
- if (emittingMessages.isEmpty()) {
- return EmitState.EMIT_END;
- } else {
- return EmitState.EMIT_MORE;
- }
- }
-
- private void fillMessages() {
-
- ByteBufferMessageSet msgs;
- try {
- long start = System.currentTimeMillis();
- msgs = consumer.fetchMessages(partition, emittingOffset + 1);
-
- if (msgs == null) {
- LOG.error("fetch null message from offset {}", emittingOffset);
- return;
- }
-
- int count = 0;
- for (MessageAndOffset msg : msgs) {
- count += 1;
- emittingMessages.add(msg);
- emittingOffset = msg.offset();
- pendingOffsets.add(emittingOffset);
- LOG.debug("fillmessage fetched a message:{}, offset:{}", msg.message().toString(), msg.offset());
- }
- long end = System.currentTimeMillis();
- LOG.info("fetch message from partition:"+partition+", offset:" + emittingOffset+", size:"+msgs.sizeInBytes()+", count:"+count +", time:"+(end-start));
- } catch (Exception e) {
- e.printStackTrace();
- LOG.error(e.getMessage(),e);
- }
- }
-
- public void commitState() {
- try {
- long lastOffset = 0;
- if (pendingOffsets.isEmpty()) {
- lastOffset = emittingOffset;
- } else {
- lastOffset = pendingOffsets.first();
- }
- if (lastOffset != lastCommittedOffset) {
- Map<Object, Object> data = new HashMap<Object, Object>();
- data.put("topology", stormConf.get(Config.TOPOLOGY_NAME));
- data.put("offset", lastOffset);
- data.put("partition", partition);
- data.put("broker", ImmutableMap.of("host", consumer.getLeaderBroker().host(), "port", consumer.getLeaderBroker().port()));
- data.put("topic", config.topic);
- zkState.writeJSON(zkPath(), data);
- lastCommittedOffset = lastOffset;
- }
- } catch (Exception e) {
- LOG.error(e.getMessage(), e);
- }
-
- }
-
- public void ack(long offset) {
- try {
- pendingOffsets.remove(offset);
- } catch (Exception e) {
- LOG.error("offset ack error " + offset);
- }
- }
-
- public void fail(long offset) {
- failedOffsets.remove(offset);
- }
-
- public void close() {
- coordinator.removeConsumer(partition);
- consumer.close();
- }
-
- @SuppressWarnings("unchecked")
- public Iterable<List<Object>> generateTuples(Message msg) {
- Iterable<List<Object>> tups = null;
- ByteBuffer payload = msg.payload();
- if (payload == null) {
- return null;
- }
- tups = Arrays.asList(Utils.tuple(Utils.toByteArray(payload)));
- return tups;
- }
-
- private String zkPath() {
- return config.zkRoot + "/kafka/offset/topic/" + config.topic + "/" + config.clientId + "/" + partition;
- }
-
- public PartitionCoordinator getCoordinator() {
- return coordinator;
- }
-
- public void setCoordinator(PartitionCoordinator coordinator) {
- this.coordinator = coordinator;
- }
-
- public int getPartition() {
- return partition;
- }
-
- public void setPartition(int partition) {
- this.partition = partition;
- }
-
- public KafkaConsumer getConsumer() {
- return consumer;
- }
-
- public void setConsumer(KafkaConsumer consumer) {
- this.consumer = consumer;
- }
-}
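
commitState checkpoints the smallest still-pending offset (pendingOffsets.first()), not the newest emitted one, so after a restart anything that was in flight is replayed rather than skipped, i.e. at-least-once delivery. The JSON written through ZkState at <zkRoot>/kafka/offset/topic/<topic>/<clientId>/<partition> looks roughly like this (values are placeholders):

    {"topology": "my-topology", "offset": 12345, "partition": 3,
     "broker": {"host": "broker1", "port": 9092}, "topic": "jstorm"}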
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionCoordinator.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionCoordinator.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionCoordinator.java
deleted file mode 100644
index 25dc368..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/PartitionCoordinator.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-
-import backtype.storm.task.TopologyContext;
-
-public class PartitionCoordinator {
- private KafkaSpoutConfig config;
- private Map<Integer, PartitionConsumer> partitionConsumerMap;
- private List<PartitionConsumer> partitionConsumers;
-
- private ZkState zkState;
- public PartitionCoordinator(Map conf, KafkaSpoutConfig config, TopologyContext context, ZkState zkState) {
- this.config = config;
- this.zkState = zkState;
- partitionConsumers = new LinkedList<PartitionConsumer>();
- createPartitionConsumers(conf, context);
- }
-
- private void createPartitionConsumers(Map conf, TopologyContext context) {
- partitionConsumerMap = new HashMap<Integer, PartitionConsumer>();
- int taskSize = context.getComponentTasks(context.getThisComponentId()).size();
- for(int i=context.getThisTaskIndex(); i<config.numPartitions; i+=taskSize) {
- PartitionConsumer partitionConsumer = new PartitionConsumer(conf, config, i, zkState);
- partitionConsumers.add(partitionConsumer);
- partitionConsumerMap.put(i, partitionConsumer);
- }
- }
-
- public List<PartitionConsumer> getPartitionConsumers() {
- return partitionConsumers;
- }
-
- public PartitionConsumer getConsumer(int partition) {
- return partitionConsumerMap.get(partition);
- }
-
- public void removeConsumer(int partition) {
- PartitionConsumer partitionConsumer = partitionConsumerMap.get(partition);
- partitionConsumers.remove(partitionConsumer);
- partitionConsumerMap.remove(partition);
- }
-
-
-
-}
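
Partitions are assigned to spout tasks by striding: task i takes partitions i, i+taskSize, i+2*taskSize, and so on. With kafka.broker.partitions=8 and three spout tasks, for example, task 0 consumes partitions 0, 3 and 6, task 1 consumes 1, 4 and 7, and task 2 consumes 2 and 5.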
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/ZkState.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/ZkState.java b/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/ZkState.java
deleted file mode 100644
index ac512e5..0000000
--- a/jstorm-utility/jstorm-kafka/src/main/java/com/alibaba/jstorm/kafka/ZkState.java
+++ /dev/null
@@ -1,95 +0,0 @@
-package com.alibaba.jstorm.kafka;
-
-import backtype.storm.Config;
-import backtype.storm.utils.Utils;
-
-import java.nio.charset.Charset;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.framework.api.CreateBuilder;
-import org.apache.curator.framework.api.ProtectACLCreateModePathAndBytesable;
-import org.apache.curator.retry.RetryNTimes;
-import org.apache.zookeeper.CreateMode;
-import org.json.simple.JSONValue;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ZkState {
- public static final Logger LOG = LoggerFactory.getLogger(ZkState.class);
- CuratorFramework _curator;
-
- private CuratorFramework newCurator(Map conf, KafkaSpoutConfig config) throws Exception {
- String serverPorts = "";
- List<Host> zkServers = config.zkServers;
- for (Host server : zkServers) {
- serverPorts = serverPorts + server.getHost() + ":" + server.getPort() + ",";
- }
- return CuratorFrameworkFactory.newClient(serverPorts, Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_SESSION_TIMEOUT)), 15000, new RetryNTimes(
- Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_RETRY_TIMES)), Utils.getInt(conf.get(Config.STORM_ZOOKEEPER_RETRY_INTERVAL))));
- }
-
- public CuratorFramework getCurator() {
- assert _curator != null;
- return _curator;
- }
-
- public ZkState(Map stateConf, KafkaSpoutConfig config) {
- try {
- _curator = newCurator(stateConf, config);
- _curator.start();
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void writeJSON(String path, Map<Object, Object> data) {
- LOG.info("Writing " + path + " the data " + data.toString());
- writeBytes(path, JSONValue.toJSONString(data).getBytes(Charset.forName("UTF-8")));
- }
-
- public void writeBytes(String path, byte[] bytes) {
- try {
- if (_curator.checkExists().forPath(path) == null) {
- CreateBuilder builder = _curator.create();
- ProtectACLCreateModePathAndBytesable<String> createAble = (ProtectACLCreateModePathAndBytesable<String>) builder
- .creatingParentsIfNeeded();
- createAble.withMode(CreateMode.PERSISTENT).forPath(path, bytes);
- } else {
- _curator.setData().forPath(path, bytes);
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public Map<Object, Object> readJSON(String path) {
- try {
- byte[] b = readBytes(path);
- if (b == null)
- return null;
- return (Map<Object, Object>) JSONValue.parse(new String(b, "UTF-8"));
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public byte[] readBytes(String path) {
- try {
- if (_curator.checkExists().forPath(path) != null) {
- return _curator.getData().forPath(path);
- } else {
- return null;
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void close() {
- _curator.close();
- _curator = null;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-kafka/src/test/java/com/alibaba/jstorm/test/kafka/KafkaTest.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-kafka/src/test/java/com/alibaba/jstorm/test/kafka/KafkaTest.java b/jstorm-utility/jstorm-kafka/src/test/java/com/alibaba/jstorm/test/kafka/KafkaTest.java
deleted file mode 100644
index a63adec..0000000
--- a/jstorm-utility/jstorm-kafka/src/test/java/com/alibaba/jstorm/test/kafka/KafkaTest.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.alibaba.jstorm.test.kafka;
-
-import java.util.Properties;
-
-import kafka.server.KafkaServerStartable;
-
-import com.netflix.curator.framework.CuratorFramework;
-import com.netflix.curator.framework.CuratorFrameworkFactory;
-import com.netflix.curator.retry.ExponentialBackoffRetry;
-import com.netflix.curator.test.TestingServer;
-
-public class KafkaTest {
- private final int port = 49123;
- private KafkaServerStartable kafka;
- private TestingServer server;
- private String zookeeperConnectionString;
-
- public KafkaTest() {}
-
- public void run() {
- try {
- server = new TestingServer();
- zookeeperConnectionString = server.getConnectString();
- ExponentialBackoffRetry retryPolicy = new ExponentialBackoffRetry(
- 1000, 3);
- CuratorFramework zookeeper = CuratorFrameworkFactory.newClient(
- zookeeperConnectionString, retryPolicy);
- zookeeper.start();
- Properties p = new Properties();
- p.setProperty("zookeeper.connect", zookeeperConnectionString);
- p.setProperty("broker.id", "0");
- p.setProperty("port", "" + port);
- kafka.server.KafkaConfig config = new kafka.server.KafkaConfig(p);
- kafka = new KafkaServerStartable(config);
- kafka.startup();
- } catch (Exception ex) {
- throw new RuntimeException("Could not start test broker", ex);
- }
- }
-
- public String getBrokerConnectionString() {
- return "localhost:" + port;
- }
-
- public int getPort() {
- return port;
- }
-
- public void shutdown() {
- kafka.shutdown();
- }
-
- public static void main(String[] args) {
- KafkaTest test = new KafkaTest();
- test.run();
- }
-}
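
KafkaTest boots an in-process ZooKeeper (curator TestingServer) plus a single Kafka broker on port 49123 for integration tests. Note that shutdown() stops only the broker; if the helper were kept, closing the TestingServer as well would avoid leaking the embedded ZooKeeper, e.g. (sketch):

    public void shutdown() throws java.io.IOException {
        kafka.shutdown();
        if (server != null) {
            server.close();   // also stops the embedded ZooKeeper
        }
    }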
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/pom.xml b/jstorm-utility/jstorm-rocket-mq/pom.xml
deleted file mode 100644
index ffc611e..0000000
--- a/jstorm-utility/jstorm-rocket-mq/pom.xml
+++ /dev/null
@@ -1,94 +0,0 @@
-<?xml version="1.0"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>metaspout</artifactId>
- <version>0.2.0-SNAPSHOT</version>
-
- <properties>
- <jstorm.version>0.9.6.1</jstorm.version>
- </properties>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
- <dependencies>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-server</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.4</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.taobao.metaq.final</groupId>
- <artifactId>metaq-client</artifactId>
- <version>3.1.8</version>
- </dependency>
- <!--
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-common</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-client</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-remoting</artifactId>
- <version>3.0.1</version>
- </dependency>
- -->
-
- </dependencies>
-
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaClientConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaClientConfig.java b/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaClientConfig.java
deleted file mode 100644
index f8a9c9c..0000000
--- a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaClientConfig.java
+++ /dev/null
@@ -1,263 +0,0 @@
-package com.alibaba.aloha.meta;
-
-import java.io.Serializable;
-import java.util.Date;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.jstorm.utils.TimeFormat;
-
-/**
- * Meta Spout Setting
- *
- * All needed configs must be prepared before submitting the topology
- *
- * @author zhongyan.feng/zhiyuan.ls
- */
-public class MetaClientConfig implements Serializable {
-
- private static final long serialVersionUID = 4157424979688593280L;
-
- public static final String META_TOPIC = "meta.topic";
- public static final String META_CONSUMER_GROUP = "meta.consumer.group";
- public static final String META_SUBEXPRESS = "meta.subexpress";
- public static final String META_NAMESERVER = "meta.nameserver";
- //pull interval(ms) from meta server
- public static final String META_PULL_INTERVAL = "meta.pull.interval.ms";
- // max fail times
- public static final String META_MAX_FAIL_TIMES = "meta.max.fail.times";
- // meta client internal queue size
- public static final String META_INTERNAL_QUEUE_SIZE = "meta.internal.queue.size";
- // spout send one batch size
- public static final String META_BATCH_SEND_MSG_SIZE = "meta.batch.send.msg.size";
- // meta client pull batch size from meta server
- public static final String META_BATCH_PULL_MSG_SIZE = "meta.batch.pull.msg.size";
- // meta client pull thread num
- public static final String META_PULL_THREAD_NUM = "meta.pull.thread.num";
- // meta message automatically ack
- public static final String META_SPOUT_AUTO_ACK = "meta.spout.auto.ack";
- // enable meta spout flow control
- public static final String META_SPOUT_FLOW_CONTROL= "meta.spout.flow.control";
-
- // format is "yyyyMMddHHmmss"
- // set the meta client offset from this timestamp
- public static final String META_CONSUMER_START_TIMESTAMP = "meta.consumer.start.timestamp";
- public static final String META_EXTRA_PROPERTIES = "meta.extra.properties";
-
-
- private final String consumerGroup;
-
- /**
- * Alipay needs to set nameServer; Taobao does not need to set this field
- */
- private final String nameServer;
-
- private final String topic;
-
- private final String subExpress;
-
- /**
- * The max allowed failures for a single message; the message is skipped
- * once this limit is exceeded
- *
- * -1 means try again until success
- */
- private int maxFailTimes = DEFAULT_FAIL_TIME;
- public static final int DEFAULT_FAIL_TIME = 5;
-
- /**
- * Local message threshold; flow control is triggered once it is exceeded
- *
- */
- private int queueSize = DEFAULT_QUEUE_SIZE;
- public static final int DEFAULT_QUEUE_SIZE = 256;
-
- /**
- * number of messages fetched from the local queue per batch;
- * it is also the send batch size
- *
- */
- private int sendBatchSize = DEFAULT_BATCH_MSG_NUM;
- public static final int DEFAULT_BATCH_MSG_NUM = 32;
-
- /**
- * number of messages pulled from the meta server per batch
- *
- */
- private int pullBatchSize = DEFAULT_BATCH_MSG_NUM;
-
- /**
- * pull interval(ms) from server for every batch
- *
- */
- private long pullInterval = 0;
-
- /**
- * number of pull threads
- */
- private int pullThreadNum = DEFAULT_PULL_THREAD_NUM;
- public static int DEFAULT_PULL_THREAD_NUM = 4;
-
- /**
- * Consumer start time. Null means start from the last consumed
- * offset (CONSUME_FROM_LAST_OFFSET)
- *
- */
- private Date startTimeStamp;
-
- private Properties peroperties;
-
- protected MetaClientConfig(String consumerGroup, String nameServer,
- String topic, String subExpress) {
- this.consumerGroup = consumerGroup;
- this.nameServer = nameServer;
- this.topic = topic;
- this.subExpress = subExpress;
- }
-
- public MetaClientConfig(Map conf) {
- topic = (String) conf.get(META_TOPIC);
- consumerGroup = (String) conf.get(META_CONSUMER_GROUP);
- subExpress = (String) conf.get(META_SUBEXPRESS);
- if (StringUtils.isBlank((String) conf.get(META_NAMESERVER)) == false) {
- nameServer = (String) conf.get(META_NAMESERVER);
- }else {
- nameServer = null;
- }
-
- maxFailTimes = JStormUtils.parseInt(conf.get(META_MAX_FAIL_TIMES),
- DEFAULT_FAIL_TIME);
-
- queueSize = JStormUtils.parseInt(conf.get(META_INTERNAL_QUEUE_SIZE),
- DEFAULT_QUEUE_SIZE);
-
- sendBatchSize = JStormUtils.parseInt(conf.get(META_BATCH_SEND_MSG_SIZE),
- DEFAULT_BATCH_MSG_NUM);
-
- pullBatchSize = JStormUtils.parseInt(conf.get(META_BATCH_PULL_MSG_SIZE),
- DEFAULT_BATCH_MSG_NUM);
-
- pullInterval = JStormUtils.parseInt(conf.get(META_PULL_INTERVAL), 0);
-
- pullThreadNum = JStormUtils.parseInt(conf.get(META_PULL_THREAD_NUM),
- DEFAULT_PULL_THREAD_NUM);
-
- String ts = (String)conf.get(META_CONSUMER_START_TIMESTAMP);
- if (ts != null) {
- Date date = null;
- try {
- date = TimeFormat.getSecond(ts);
- }catch(Exception e) {
- // ignore parse errors; startTimeStamp simply stays unset
- }
-
- if (date != null) {
- startTimeStamp = date;
- }
- }
-
- Object prop = conf.get(META_EXTRA_PROPERTIES);
- if (prop != null && prop instanceof Properties) {
- peroperties = (Properties)prop;
- }
- }
-
- public static MetaClientConfig mkInstance(Map conf) {
-
- return new MetaClientConfig(conf);
- }
-
-
-
- public int getMaxFailTimes() {
- return maxFailTimes;
- }
-
- public void setMaxFailTimes(int maxFailTimes) {
- this.maxFailTimes = maxFailTimes;
- }
-
- public int getQueueSize() {
- return queueSize;
- }
-
- public void setQueueSize(int queueSize) {
- this.queueSize = queueSize;
- }
-
- public int getSendBatchSize() {
- return sendBatchSize;
- }
-
- public void setSendBatchSize(int sendBatchSize) {
- this.sendBatchSize = sendBatchSize;
- }
-
- public int getPullBatchSize() {
- return pullBatchSize;
- }
-
- public void setPullBatchSize(int pullBatchSize) {
- this.pullBatchSize = pullBatchSize;
- }
-
- public long getPullInterval() {
- return pullInterval;
- }
-
- public void setPullInterval(long pullInterval) {
- this.pullInterval = pullInterval;
- }
-
- public int getPullThreadNum() {
- return pullThreadNum;
- }
-
- public void setPullThreadNum(int pullThreadNum) {
- this.pullThreadNum = pullThreadNum;
- }
-
- public Date getStartTimeStamp() {
- return startTimeStamp;
- }
-
- public void setStartTimeStamp(Date startTimeStamp) {
- this.startTimeStamp = startTimeStamp;
- }
-
- public Properties getPeroperties() {
- return peroperties;
- }
-
- public void setPeroperties(Properties peroperties) {
- this.peroperties = peroperties;
- }
-
- public String getConsumerGroup() {
- return consumerGroup;
- }
-
- public String getNameServer() {
- return nameServer;
- }
-
- public String getTopic() {
- return topic;
- }
-
- public String getSubExpress() {
- return subExpress;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-
-}
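The deleted MetaClientConfig reads all of its settings from the topology conf map via the META_* keys declared above, so the configuration has to be assembled before the topology is submitted. A short, hedged sketch of populating that map; the topic, group, values and wrapper class are placeholders, not taken from the original code.

    package com.alibaba.aloha.meta.example;   // hypothetical location for this sketch

    import java.util.HashMap;
    import java.util.Map;

    import com.alibaba.aloha.meta.MetaClientConfig;

    public class MetaClientConfigSketch {
        public static MetaClientConfig buildConfig() {
            Map<Object, Object> conf = new HashMap<Object, Object>();
            // required settings
            conf.put(MetaClientConfig.META_TOPIC, "my_topic");            // placeholder topic
            conf.put(MetaClientConfig.META_CONSUMER_GROUP, "my_group");   // placeholder consumer group
            conf.put(MetaClientConfig.META_SUBEXPRESS, "*");
            // optional tuning knobs; the defaults shown above apply when these are absent
            conf.put(MetaClientConfig.META_MAX_FAIL_TIMES, "5");
            conf.put(MetaClientConfig.META_BATCH_PULL_MSG_SIZE, "32");
            conf.put(MetaClientConfig.META_CONSUMER_START_TIMESTAMP, "20141011000000"); // yyyyMMddHHmmss
            return MetaClientConfig.mkInstance(conf);
        }
    }

In a real topology these entries would be merged into the same conf map that is passed to StormSubmitter.submitTopology (see TestTopology.java later in this diff).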
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaConsumerFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaConsumerFactory.java b/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaConsumerFactory.java
deleted file mode 100644
index 52db507..0000000
--- a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaConsumerFactory.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package com.alibaba.aloha.meta;
-
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.rocketmq.client.consumer.listener.MessageListenerConcurrently;
-import com.alibaba.rocketmq.common.consumer.ConsumeFromWhere;
-import com.alibaba.rocketmq.common.protocol.heartbeat.MessageModel;
-import com.taobao.metaq.client.MetaHelper;
-import com.taobao.metaq.client.MetaPushConsumer;
-
-public class MetaConsumerFactory {
-
- private static final Logger LOG = Logger.getLogger(MetaConsumerFactory.class);
-
-
- private static final long serialVersionUID = 4641537253577312163L;
-
- public static Map<String, MetaPushConsumer> consumers =
- new HashMap<String, MetaPushConsumer>();
-
- public static synchronized MetaPushConsumer mkInstance(MetaClientConfig config,
- MessageListenerConcurrently listener) throws Exception{
-
- String topic = config.getTopic();
- String groupId = config.getConsumerGroup();
-
- String key = topic + "@" + groupId;
-
- MetaPushConsumer consumer = consumers.get(key);
- if (consumer != null) {
-
- LOG.info("Consumer of " + key + " has been created, don't recreate it ");
-
- // Attention: return null here to signal a duplicated consumer to the caller
- return null;
- }
-
-
- StringBuilder sb = new StringBuilder();
- sb.append("Begin to init meta client \n");
- sb.append(",configuration:").append(config);
-
- LOG.info(sb.toString());
-
- consumer = new MetaPushConsumer(config.getConsumerGroup());
-
- String nameServer = config.getNameServer();
- if ( nameServer != null) {
- String namekey = "rocketmq.namesrv.domain";
-
- String value = System.getProperty(namekey);
- // this is for alipay
- if (value == null) {
-
- System.setProperty(namekey, nameServer);
- } else if (value.equals(nameServer) == false) {
- throw new Exception(
- "Different nameserver address in the same worker "
- + value + ":" + nameServer);
-
- }
- }
-
- String instanceName = groupId +"@" + JStormUtils.process_pid();
- consumer.setInstanceName(instanceName);
- consumer.setConsumeFromWhere(ConsumeFromWhere.CONSUME_FROM_LAST_OFFSET);
- consumer.subscribe(config.getTopic(), config.getSubExpress());
- consumer.registerMessageListener(listener);
-
- consumer.setPullThresholdForQueue(config.getQueueSize());
- consumer.setConsumeMessageBatchMaxSize(config.getSendBatchSize());
- consumer.setPullBatchSize(config.getPullBatchSize());
- consumer.setPullInterval(config.getPullInterval());
- consumer.setConsumeThreadMin(config.getPullThreadNum());
- consumer.setConsumeThreadMax(config.getPullThreadNum());
-
-
- Date date = config.getStartTimeStamp() ;
- if ( date != null) {
- LOG.info("Begin to reset meta offset to " + date);
- try {
- MetaHelper.resetOffsetByTimestamp(MessageModel.CLUSTERING,
- instanceName, config.getConsumerGroup(),
- config.getTopic(), date.getTime());
- LOG.info("Successfully reset meta offset to " + date);
- }catch(Exception e) {
- LOG.error("Failed to reset meta offset to " + date);
- }
-
- }else {
- LOG.info("Don't reset meta offset ");
- }
-
- consumer.start();
-
- consumers.put(key, consumer);
- LOG.info("Successfully create " + key + " consumer");
-
-
- return consumer;
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaSpout.java b/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaSpout.java
deleted file mode 100644
index e6c3a26..0000000
--- a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaSpout.java
+++ /dev/null
@@ -1,248 +0,0 @@
-package com.alibaba.aloha.meta;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-
-import com.alibaba.jstorm.client.metric.MetricClient;
-import com.alibaba.jstorm.client.spout.IAckValueSpout;
-import com.alibaba.jstorm.client.spout.IFailValueSpout;
-import com.alibaba.jstorm.metric.JStormHistogram;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.rocketmq.client.consumer.listener.ConsumeConcurrentlyContext;
-import com.alibaba.rocketmq.client.consumer.listener.ConsumeConcurrentlyStatus;
-import com.alibaba.rocketmq.client.consumer.listener.MessageListenerConcurrently;
-import com.alibaba.rocketmq.common.message.MessageExt;
-import com.taobao.metaq.client.MetaPushConsumer;
-
-public class MetaSpout implements IRichSpout, IAckValueSpout, IFailValueSpout,
- MessageListenerConcurrently {
- /** */
- private static final long serialVersionUID = 8476906628618859716L;
- private static final Logger LOG = Logger.getLogger(MetaSpout.class);
-
- protected MetaClientConfig metaClientConfig;
- protected SpoutOutputCollector collector;
- protected transient MetaPushConsumer consumer;
-
- protected Map conf;
- protected String id;
- protected boolean flowControl;
- protected boolean autoAck;
-
- protected transient LinkedBlockingDeque<MetaTuple> sendingQueue;
-
- protected transient MetricClient metricClient;
- protected transient JStormHistogram waithHistogram;
- protected transient JStormHistogram processHistogram;
-
- public MetaSpout() {
-
- }
-
- public void initMetricClient(TopologyContext context) {
- metricClient = new MetricClient(context);
- waithHistogram = metricClient.registerHistogram("MetaTupleWait", null);
- processHistogram = metricClient.registerHistogram("MetaTupleProcess",
- null);
- }
-
- @Override
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- this.conf = conf;
- this.collector = collector;
- this.id = context.getThisComponentId() + ":" + context.getThisTaskId();
- this.sendingQueue = new LinkedBlockingDeque<MetaTuple>();
-
- this.flowControl = JStormUtils.parseBoolean(
- conf.get(MetaClientConfig.META_SPOUT_FLOW_CONTROL), true);
- this.autoAck = JStormUtils.parseBoolean(
- conf.get(MetaClientConfig.META_SPOUT_AUTO_ACK), false);
-
- StringBuilder sb = new StringBuilder();
- sb.append("Begin to init MetaSpout:").append(id);
- sb.append(", flowControl:").append(flowControl);
- sb.append(", autoAck:").append(autoAck);
- LOG.info( sb.toString());
-
- initMetricClient(context);
-
- metaClientConfig = MetaClientConfig.mkInstance(conf);
-
- try {
- consumer = MetaConsumerFactory.mkInstance(metaClientConfig, this);
- } catch (Exception e) {
- LOG.error("Failed to create Meta Consumer ", e);
- throw new RuntimeException("Failed to create MetaConsumer" + id, e);
- }
-
- if (consumer == null) {
- LOG.warn(id
- + " already exist consumer in current worker, don't need to fetch data ");
-
- new Thread(new Runnable() {
-
- @Override
- public void run() {
- while (true) {
- try {
- Thread.sleep(10000);
- } catch (InterruptedException e) {
- break;
- }
-
- StringBuilder sb = new StringBuilder();
- sb.append("Only on meta consumer can be run on one process,");
- sb.append(" but there are mutliple spout consumes with the same topic@groupid meta, so the second one ");
- sb.append(id).append(" do nothing ");
- LOG.info(sb.toString());
- }
- }
- }).start();
- }
-
- LOG.info("Successfully init " + id);
- }
-
- @Override
- public void close() {
- if (consumer != null) {
- consumer.shutdown();
- }
-
- }
-
- @Override
- public void activate() {
- if (consumer != null) {
- consumer.resume();
- }
-
- }
-
- @Override
- public void deactivate() {
- if (consumer != null) {
- consumer.suspend();
- }
- }
-
- public void sendTuple(MetaTuple metaTuple) {
- metaTuple.updateEmitMs();
- collector.emit(new Values(metaTuple), metaTuple.getCreateMs());
- }
-
- @Override
- public void nextTuple() {
- MetaTuple metaTuple = null;
- try {
- metaTuple = sendingQueue.take();
- } catch (InterruptedException e) {
- }
-
- if (metaTuple == null) {
- return;
- }
-
- sendTuple(metaTuple);
-
- }
-
- @Deprecated
- public void ack(Object msgId) {
- LOG.warn("Shouldn't go this function");
- }
-
- @Deprecated
- public void fail(Object msgId) {
- LOG.warn("Shouldn't go this function");
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("MetaTuple"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
- @Override
- public void fail(Object msgId, List<Object> values) {
- MetaTuple metaTuple = (MetaTuple) values.get(0);
- AtomicInteger failTimes = metaTuple.getFailureTimes();
-
- int failNum = failTimes.incrementAndGet();
- if (failNum > metaClientConfig.getMaxFailTimes()) {
- LOG.warn("Message " + metaTuple.getMq() + " fail times " + failNum);
- finishTuple(metaTuple);
- return;
- }
-
- if (flowControl) {
- sendingQueue.offer(metaTuple);
- } else {
- sendTuple(metaTuple);
- }
- }
-
- public void finishTuple(MetaTuple metaTuple) {
- waithHistogram.update(metaTuple.getEmitMs() - metaTuple.getCreateMs());
- processHistogram.update(System.currentTimeMillis() - metaTuple.getEmitMs());
- metaTuple.done();
- }
-
- @Override
- public void ack(Object msgId, List<Object> values) {
- MetaTuple metaTuple = (MetaTuple) values.get(0);
- finishTuple(metaTuple);
- }
-
- @Override
- public ConsumeConcurrentlyStatus consumeMessage(List<MessageExt> msgs,
- ConsumeConcurrentlyContext context) {
- try {
- MetaTuple metaTuple = new MetaTuple(msgs, context.getMessageQueue());
-
- if (flowControl) {
- sendingQueue.offer(metaTuple);
- } else {
- sendTuple(metaTuple);
- }
-
- if (autoAck) {
- return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
- } else {
- metaTuple.waitFinish();
- if (metaTuple.isSuccess() == true) {
- return ConsumeConcurrentlyStatus.CONSUME_SUCCESS;
- } else {
- return ConsumeConcurrentlyStatus.RECONSUME_LATER;
- }
- }
-
- } catch (Exception e) {
- LOG.error("Failed to emit " + id, e);
- return ConsumeConcurrentlyStatus.RECONSUME_LATER;
- }
-
- }
-
- public MetaPushConsumer getConsumer() {
- return consumer;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaTuple.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaTuple.java b/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaTuple.java
deleted file mode 100644
index d735749..0000000
--- a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/MetaTuple.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package com.alibaba.aloha.meta;
-
-import java.io.Serializable;
-import java.util.List;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-import com.alibaba.rocketmq.common.message.MessageExt;
-import com.alibaba.rocketmq.common.message.MessageQueue;
-
-public class MetaTuple implements Serializable {
-
- /** */
- private static final long serialVersionUID = 2277714452693486955L;
-
- protected final List<MessageExt> msgList;
- protected final MessageQueue mq;
-
- protected final AtomicInteger failureTimes;
- protected final long createMs;
- protected long emitMs;
-
- protected transient CountDownLatch latch;
- protected transient boolean isSuccess;
-
- public MetaTuple(List<MessageExt> msgList, MessageQueue mq) {
- this.msgList = msgList;
- this.mq = mq;
-
- this.failureTimes = new AtomicInteger(0);
- this.createMs = System.currentTimeMillis();
-
- this.latch = new CountDownLatch(1);
- this.isSuccess = false;
- }
-
- public AtomicInteger getFailureTimes() {
- return failureTimes;
- }
-
- public long getCreateMs() {
- return createMs;
- }
-
- public long getEmitMs() {
- return emitMs;
- }
-
- public void updateEmitMs() {
- this.emitMs = System.currentTimeMillis();
- }
-
- public List<MessageExt> getMsgList() {
- return msgList;
- }
-
-
- public MessageQueue getMq() {
- return mq;
- }
-
- public boolean waitFinish() throws InterruptedException {
- return latch.await(4, TimeUnit.HOURS);
- }
-
- public void done() {
- isSuccess = true;
- latch.countDown();
- }
-
- public void fail() {
- isSuccess = false;
- latch.countDown();
- }
-
- public boolean isSuccess() {
- return isSuccess;
- }
-
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/TestTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/TestTopology.java b/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/TestTopology.java
deleted file mode 100644
index 608b54c..0000000
--- a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/TestTopology.java
+++ /dev/null
@@ -1,150 +0,0 @@
-package com.alibaba.aloha.meta.example;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import org.apache.log4j.Logger;
-import org.yaml.snakeyaml.Yaml;
-
-import backtype.storm.Config;
-import backtype.storm.LocalCluster;
-import backtype.storm.StormSubmitter;
-import backtype.storm.topology.TopologyBuilder;
-
-import com.alibaba.aloha.meta.MetaSpout;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-/**
- * TestTopology
- *
- * @author longda/zhiyuan.ls
- *
- */
-public class TestTopology {
-
- private static Logger LOG = Logger.getLogger(TestTopology.class);
-
- public static String WRITER_COMPONENT = "writer";
-
- public static void main(String[] args) throws Exception {
- if (args.length == 0) {
- System.err.println("Please input configuration file");
- System.exit(-1);
- }
-
- LoadConf(args[0]);
-
- TopologyBuilder builder = setupBuilder();
-
- submitTopology(builder);
-
- }
-
- private static TopologyBuilder setupBuilder() throws Exception {
- TopologyBuilder builder = new TopologyBuilder();
-
- int writerParallel = JStormUtils.parseInt(
- conf.get("topology.writer.parallel"), 1);
-
- int spoutParallel = JStormUtils.parseInt(
- conf.get("topology.spout.parallel"), 1);
-
- builder.setSpout("MetaSpout", new MetaSpout(), spoutParallel);
-
- builder.setBolt(WRITER_COMPONENT, new WriterBolt(), writerParallel)
- .shuffleGrouping("MetaSpout");
-
- return builder;
- }
-
- private static void submitTopology(TopologyBuilder builder) {
- try {
- if (local_mode(conf)) {
-
- LocalCluster cluster = new LocalCluster();
-
- cluster.submitTopology(
- String.valueOf(conf.get("topology.name")), conf,
- builder.createTopology());
-
- Thread.sleep(200000);
-
- cluster.shutdown();
- } else {
- StormSubmitter.submitTopology(
- String.valueOf(conf.get("topology.name")), conf,
- builder.createTopology());
- }
-
- } catch (Exception e) {
- LOG.error(e.getMessage(), e);
- }
- }
-
- private static Map conf = new HashMap<Object, Object>();
-
- private static void LoadProperty(String prop) {
- Properties properties = new Properties();
-
- try {
- InputStream stream = new FileInputStream(prop);
- properties.load(stream);
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + prop);
- } catch (Exception e1) {
- e1.printStackTrace();
-
- return;
- }
-
- conf.putAll(properties);
- }
-
- private static void LoadYaml(String confPath) {
-
- Yaml yaml = new Yaml();
-
- try {
- InputStream stream = new FileInputStream(confPath);
-
- conf = (Map) yaml.load(stream);
- if (conf == null || conf.isEmpty() == true) {
- throw new RuntimeException("Failed to read config file");
- }
-
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + confPath);
- throw new RuntimeException("No config file");
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException("Failed to read config file");
- }
-
- return;
- }
-
- private static void LoadConf(String arg) {
- if (arg.endsWith("yaml")) {
- LoadYaml(arg);
- } else {
- LoadProperty(arg);
- }
- }
-
- public static boolean local_mode(Map conf) {
- String mode = (String) conf.get(Config.STORM_CLUSTER_MODE);
- if (mode != null) {
- if (mode.equals("local")) {
- return true;
- }
- }
-
- return false;
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/WriterBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/WriterBolt.java b/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/WriterBolt.java
deleted file mode 100644
index 5eddef9..0000000
--- a/jstorm-utility/jstorm-rocket-mq/src/main/java/com/alibaba/aloha/meta/example/WriterBolt.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package com.alibaba.aloha.meta.example;
-
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-
-import com.alibaba.aloha.meta.MetaTuple;
-
-
-public class WriterBolt implements IRichBolt {
-
- private static final long serialVersionUID = 2495121976857546346L;
-
- private static final Logger LOG = Logger.getLogger(WriterBolt.class);
-
- protected OutputCollector collector;
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- this.collector = collector;
-
- }
-
- public void execute(Tuple tuple) {
- // TODO Auto-generated method stub
- MetaTuple metaTuple = (MetaTuple)tuple.getValue(0);
-
- try {
- LOG.info("Messages:" + metaTuple);
-
- } catch (Exception e) {
- collector.fail(tuple);
- return ;
- //throw new FailedException(e);
- }
-
- collector.ack(tuple);
- }
-
- public void cleanup() {
- // TODO Auto-generated method stub
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- // TODO Auto-generated method stub
-
- }
-
- public Map<String, Object> getComponentConfiguration() {
- // TODO Auto-generated method stub
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/MANIFEST.MF
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/MANIFEST.MF b/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/MANIFEST.MF
deleted file mode 100644
index 458dd09..0000000
--- a/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/MANIFEST.MF
+++ /dev/null
@@ -1,5 +0,0 @@
-Manifest-Version: 1.0
-Built-By: basti.lj
-Build-Jdk: 1.6.0_45
-Created-By: Maven Integration for Eclipse
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.properties
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.properties b/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.properties
deleted file mode 100644
index ad01a91..0000000
--- a/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.properties
+++ /dev/null
@@ -1,7 +0,0 @@
-#Generated by Maven Integration for Eclipse
-#Mon Nov 03 15:24:06 CST 2014
-version=0.2.0-SNAPSHOT
-groupId=com.alibaba.jstorm
-m2e.projectName=metaspout
-m2e.projectLocation=D\:\\code\\aloha_branch\\github_master\\jstorm\\jstorm-utility\\jstorm-rocket-mq
-artifactId=metaspout
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.xml b/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.xml
deleted file mode 100644
index ffc611e..0000000
--- a/jstorm-utility/jstorm-rocket-mq/target/classes/META-INF/maven/com.alibaba.jstorm/metaspout/pom.xml
+++ /dev/null
@@ -1,94 +0,0 @@
-<?xml version="1.0"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>metaspout</artifactId>
- <version>0.2.0-SNAPSHOT</version>
-
- <properties>
- <jstorm.version>0.9.6.1</jstorm.version>
- </properties>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
- <dependencies>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-server</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.4</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.taobao.metaq.final</groupId>
- <artifactId>metaq-client</artifactId>
- <version>3.1.8</version>
- </dependency>
- <!--
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-common</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-client</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-remoting</artifactId>
- <version>3.0.1</version>
- </dependency>
- -->
-
- </dependencies>
-
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/jstorm-rocket-mq/test/main/resources/metaspout.yaml
----------------------------------------------------------------------
diff --git a/jstorm-utility/jstorm-rocket-mq/test/main/resources/metaspout.yaml b/jstorm-utility/jstorm-rocket-mq/test/main/resources/metaspout.yaml
deleted file mode 100644
index f007772..0000000
--- a/jstorm-utility/jstorm-rocket-mq/test/main/resources/metaspout.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-
-#Meta Client Configuration
-# Please refer MetaClientConfig for every setting's details
-meta.topic: "bbl_user"
-meta.consumer.group: "bbl_user"
-meta.subexpress: "*"
-#meta.nameserver: ""
-#meta.pull.interval.ms: 0
-#meta.max.fail.times: 5
-#meta.internal.queue.size: 256
-#meta.batch.send.msg.size: 16
-#meta.batch.pull.msg.size: 32
-#meta.pull.thread.num: 4
-#meta.spout.auto.ack: false
-#meta.spout.flow.control: true
-#yyyyMMddHHmmss
-meta.consumer.start.timestamp: "20141011000000"
-#meta.extra.properties:
-
-topology.name: test_meta_spout
-topology.version: 1.0.0
-topology.workers: 5
-topology.max.spout.pending: 10
-topology.acker.executors: 1
-
-topology.debug: false
-topology.debug.recv.tuple: false
-storm.cluster.mode: local
-
-topology.spout.parallel: 2
-topology.writer.parallel: 1
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/conf/ons.yaml
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/conf/ons.yaml b/jstorm-utility/ons/conf/ons.yaml
deleted file mode 100644
index 5736422..0000000
--- a/jstorm-utility/ons/conf/ons.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
-#############################################################
-###################### ONS Setting Begin ####################
-Topic: "longdatest"
-SubExpress: "*"
-AccessKey: null
-SecretKey: null
-
-ConsumerId: "CID-LONGDA-123"
-ConsumeThreadNums: 4
-ProducerId: "PID_25770293805-101"
-
-#SendMsgTimeoutMillis:
-#MessageModel:
-#ONSAddr:
-#NAMESRV_ADDR:
-
-###################### ONS Setting End ######################
-#############################################################
-
-
-#############################################################
-############### JStorm Topology Setting Begin ###############
-
-# enable flow control for the ONS spout
-# all messages will be sent in Spout.nextTuple
-OnsSpoutFlowControl: true
-
-# let the spout consume with autoAck mode
-# if disabled, the consumer offset won't move forward until spout.ack is called
-OnsSpoutAutoAck: false
-
-# if a message's failure count exceeds OnsMsgMaxFailTimes
-# it will be thrown away
-OnsMsgMaxFailTimes: 5
-
-topology.name: "ons_test"
-topology.consumer.parallel: 1
-topology.producer.parallel: 1
-worker.memory.size: 2147483648
-topology.workers: 1
-topology.acker.executors: 0
-storm.cluster.mode: "local"
-
-############### JStorm Topology Setting End ###############
-#############################################################
-
-
-
-
[17/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/TridentTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/TridentTopologyBuilder.java b/jstorm-client/src/main/java/storm/trident/topology/TridentTopologyBuilder.java
deleted file mode 100644
index 1e75e00..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/TridentTopologyBuilder.java
+++ /dev/null
@@ -1,751 +0,0 @@
-package storm.trident.topology;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.topology.BaseConfigurationDeclarer;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.InputDeclarer;
-import backtype.storm.topology.SpoutDeclarer;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-import storm.trident.spout.BatchSpoutExecutor;
-import storm.trident.spout.IBatchSpout;
-import storm.trident.spout.ICommitterTridentSpout;
-import storm.trident.spout.ITridentSpout;
-import storm.trident.spout.RichSpoutBatchTriggerer;
-import storm.trident.spout.TridentSpoutCoordinator;
-import storm.trident.spout.TridentSpoutExecutor;
-import storm.trident.topology.TridentBoltExecutor.CoordSpec;
-import storm.trident.topology.TridentBoltExecutor.CoordType;
-
-// based on transactional topologies
-public class TridentTopologyBuilder {
- Map<GlobalStreamId, String> _batchIds = new HashMap();
- Map<String, TransactionalSpoutComponent> _spouts = new HashMap();
- Map<String, SpoutComponent> _batchPerTupleSpouts = new HashMap();
- Map<String, Component> _bolts = new HashMap();
-
-
- public SpoutDeclarer setBatchPerTupleSpout(String id, String streamName, IRichSpout spout, Integer parallelism, String batchGroup) {
- Map<String, String> batchGroups = new HashMap();
- batchGroups.put(streamName, batchGroup);
- markBatchGroups(id, batchGroups);
- SpoutComponent c = new SpoutComponent(spout, streamName, parallelism, batchGroup);
- _batchPerTupleSpouts.put(id, c);
- return new SpoutDeclarerImpl(c);
- }
-
- public SpoutDeclarer setSpout(String id, String streamName, String txStateId, IBatchSpout spout, Integer parallelism, String batchGroup) {
- return setSpout(id, streamName, txStateId, new BatchSpoutExecutor(spout), parallelism, batchGroup);
- }
-
- public SpoutDeclarer setSpout(String id, String streamName, String txStateId, ITridentSpout spout, Integer parallelism, String batchGroup) {
- Map<String, String> batchGroups = new HashMap();
- batchGroups.put(streamName, batchGroup);
- markBatchGroups(id, batchGroups);
-
- TransactionalSpoutComponent c = new TransactionalSpoutComponent(spout, streamName, parallelism, txStateId, batchGroup);
- _spouts.put(id, c);
- return new SpoutDeclarerImpl(c);
- }
-
- // map from stream name to batch id
- public BoltDeclarer setBolt(String id, ITridentBatchBolt bolt, Integer parallelism, Set<String> committerBatches, Map<String, String> batchGroups) {
- markBatchGroups(id, batchGroups);
- Component c = new Component(bolt, parallelism, committerBatches);
- _bolts.put(id, c);
- return new BoltDeclarerImpl(c);
-
- }
-
- String masterCoordinator(String batchGroup) {
- return "$mastercoord-" + batchGroup;
- }
-
- static final String SPOUT_COORD_PREFIX = "$spoutcoord-";
-
- public static String spoutCoordinator(String spoutId) {
- return SPOUT_COORD_PREFIX + spoutId;
- }
-
- public static String spoutIdFromCoordinatorId(String coordId) {
- return coordId.substring(SPOUT_COORD_PREFIX.length());
- }
-
- Map<GlobalStreamId, String> fleshOutStreamBatchIds(boolean includeCommitStream) {
- Map<GlobalStreamId, String> ret = new HashMap<GlobalStreamId, String>(_batchIds);
- Set<String> allBatches = new HashSet(_batchIds.values());
- for(String b: allBatches) {
- ret.put(new GlobalStreamId(masterCoordinator(b), MasterBatchCoordinator.BATCH_STREAM_ID), b);
- if(includeCommitStream) {
- ret.put(new GlobalStreamId(masterCoordinator(b), MasterBatchCoordinator.COMMIT_STREAM_ID), b);
- }
- // DO NOT include the success stream as part of the batch. it should not trigger coordination tuples,
- // and is just a metadata tuple to assist in cleanup, should not trigger batch tracking
- }
-
- for(String id: _spouts.keySet()) {
- TransactionalSpoutComponent c = _spouts.get(id);
- if(c.batchGroupId!=null) {
- ret.put(new GlobalStreamId(spoutCoordinator(id), MasterBatchCoordinator.BATCH_STREAM_ID), c.batchGroupId);
- }
- }
-
- //this takes care of setting up coord streams for spouts and bolts
- for(GlobalStreamId s: _batchIds.keySet()) {
- String b = _batchIds.get(s);
- ret.put(new GlobalStreamId(s.get_componentId(), TridentBoltExecutor.COORD_STREAM(b)), b);
- }
-
- return ret;
- }
-
- public StormTopology buildTopology() {
- TopologyBuilder builder = new TopologyBuilder();
- Map<GlobalStreamId, String> batchIdsForSpouts = fleshOutStreamBatchIds(false);
- Map<GlobalStreamId, String> batchIdsForBolts = fleshOutStreamBatchIds(true);
-
- Map<String, List<String>> batchesToCommitIds = new HashMap<String, List<String>>();
- Map<String, List<ITridentSpout>> batchesToSpouts = new HashMap<String, List<ITridentSpout>>();
-
- for(String id: _spouts.keySet()) {
- TransactionalSpoutComponent c = _spouts.get(id);
- if(c.spout instanceof IRichSpout) {
-
- //TODO: wrap this to set the stream name
- builder.setSpout(id, (IRichSpout) c.spout, c.parallelism);
- } else {
- String batchGroup = c.batchGroupId;
- if(!batchesToCommitIds.containsKey(batchGroup)) {
- batchesToCommitIds.put(batchGroup, new ArrayList<String>());
- }
- batchesToCommitIds.get(batchGroup).add(c.commitStateId);
-
- if(!batchesToSpouts.containsKey(batchGroup)) {
- batchesToSpouts.put(batchGroup, new ArrayList<ITridentSpout>());
- }
- batchesToSpouts.get(batchGroup).add((ITridentSpout) c.spout);
-
-
- BoltDeclarer scd =
- builder.setBolt(spoutCoordinator(id), new TridentSpoutCoordinator(c.commitStateId, (ITridentSpout) c.spout))
- .globalGrouping(masterCoordinator(c.batchGroupId), MasterBatchCoordinator.BATCH_STREAM_ID)
- .globalGrouping(masterCoordinator(c.batchGroupId), MasterBatchCoordinator.SUCCESS_STREAM_ID)
- .globalGrouping(masterCoordinator(c.batchGroupId), MasterBatchCoordinator.COMMIT_STREAM_ID);
-
- for(Map m: c.componentConfs) {
- scd.addConfigurations(m);
- }
-
- Map<String, TridentBoltExecutor.CoordSpec> specs = new HashMap();
- specs.put(c.batchGroupId, new CoordSpec());
- BoltDeclarer bd = builder.setBolt(id,
- new TridentBoltExecutor(
- new TridentSpoutExecutor(
- c.commitStateId,
- c.streamName,
- ((ITridentSpout) c.spout)),
- batchIdsForSpouts,
- specs),
- c.parallelism);
- bd.allGrouping(spoutCoordinator(id), MasterBatchCoordinator.BATCH_STREAM_ID);
- bd.allGrouping(masterCoordinator(batchGroup), MasterBatchCoordinator.SUCCESS_STREAM_ID);
- if(c.spout instanceof ICommitterTridentSpout) {
- bd.allGrouping(masterCoordinator(batchGroup), MasterBatchCoordinator.COMMIT_STREAM_ID);
- }
- for(Map m: c.componentConfs) {
- bd.addConfigurations(m);
- }
- }
- }
-
- for(String id: _batchPerTupleSpouts.keySet()) {
- SpoutComponent c = _batchPerTupleSpouts.get(id);
- SpoutDeclarer d = builder.setSpout(id, new RichSpoutBatchTriggerer((IRichSpout) c.spout, c.streamName, c.batchGroupId), c.parallelism);
-
- for(Map conf: c.componentConfs) {
- d.addConfigurations(conf);
- }
- }
-
- for(String id: _bolts.keySet()) {
- Component c = _bolts.get(id);
-
- Map<String, CoordSpec> specs = new HashMap();
-
- for(GlobalStreamId s: getBoltSubscriptionStreams(id)) {
- String batch = batchIdsForBolts.get(s);
- if(!specs.containsKey(batch)) specs.put(batch, new CoordSpec());
- CoordSpec spec = specs.get(batch);
- CoordType ct;
- if(_batchPerTupleSpouts.containsKey(s.get_componentId())) {
- ct = CoordType.single();
- } else {
- ct = CoordType.all();
- }
- spec.coords.put(s.get_componentId(), ct);
- }
-
- for(String b: c.committerBatches) {
- specs.get(b).commitStream = new GlobalStreamId(masterCoordinator(b), MasterBatchCoordinator.COMMIT_STREAM_ID);
- }
-
- BoltDeclarer d = builder.setBolt(id, new TridentBoltExecutor(c.bolt, batchIdsForBolts, specs), c.parallelism);
- for(Map conf: c.componentConfs) {
- d.addConfigurations(conf);
- }
-
- for(InputDeclaration inputDecl: c.declarations) {
- inputDecl.declare(d);
- }
-
- Map<String, Set<String>> batchToComponents = getBoltBatchToComponentSubscriptions(id);
- for(String b: batchToComponents.keySet()) {
- for(String comp: batchToComponents.get(b)) {
- d.directGrouping(comp, TridentBoltExecutor.COORD_STREAM(b));
- }
- }
-
- for(String b: c.committerBatches) {
- d.allGrouping(masterCoordinator(b), MasterBatchCoordinator.COMMIT_STREAM_ID);
- }
- }
-
- for(String batch: batchesToCommitIds.keySet()) {
- List<String> commitIds = batchesToCommitIds.get(batch);
- boolean batchCommit = false;
- builder.setSpout(masterCoordinator(batch), new MasterBatchCoordinator(commitIds, batchesToSpouts.get(batch)));
- }
-
- return builder.createTopology();
- }
-
- private void markBatchGroups(String component, Map<String, String> batchGroups) {
- for(String stream: batchGroups.keySet()) {
- _batchIds.put(new GlobalStreamId(component, stream), batchGroups.get(stream));
- }
- }
-
-
- private static class SpoutComponent {
- public Object spout;
- public Integer parallelism;
- public List<Map> componentConfs = new ArrayList<Map>();
- String batchGroupId;
- String streamName;
-
- public SpoutComponent(Object spout, String streamName, Integer parallelism, String batchGroupId) {
- this.spout = spout;
- this.streamName = streamName;
- this.parallelism = parallelism;
- this.batchGroupId = batchGroupId;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
- }
-
- private static class TransactionalSpoutComponent extends SpoutComponent {
- public String commitStateId;
-
- public TransactionalSpoutComponent(Object spout, String streamName, Integer parallelism, String commitStateId, String batchGroupId) {
- super(spout, streamName, parallelism, batchGroupId);
- this.commitStateId = commitStateId;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
- }
- }
-
- private static class Component {
- public ITridentBatchBolt bolt;
- public Integer parallelism;
- public List<InputDeclaration> declarations = new ArrayList<InputDeclaration>();
- public List<Map> componentConfs = new ArrayList<Map>();
- public Set<String> committerBatches;
-
- public Component(ITridentBatchBolt bolt, Integer parallelism,Set<String> committerBatches) {
- this.bolt = bolt;
- this.parallelism = parallelism;
- this.committerBatches = committerBatches;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this, ToStringStyle.MULTI_LINE_STYLE);
- }
- }
-
- Map<String, Set<String>> getBoltBatchToComponentSubscriptions(String id) {
- Map<String, Set<String>> ret = new HashMap();
- for(GlobalStreamId s: getBoltSubscriptionStreams(id)) {
- String b = _batchIds.get(s);
- if(!ret.containsKey(b)) ret.put(b, new HashSet());
- ret.get(b).add(s.get_componentId());
- }
- return ret;
- }
-
- List<GlobalStreamId> getBoltSubscriptionStreams(String id) {
- List<GlobalStreamId> ret = new ArrayList();
- Component c = _bolts.get(id);
- for(InputDeclaration d: c.declarations) {
- ret.add(new GlobalStreamId(d.getComponent(), d.getStream()));
- }
- return ret;
- }
-
- private static interface InputDeclaration {
- void declare(InputDeclarer declarer);
- String getComponent();
- String getStream();
- }
-
- private class SpoutDeclarerImpl extends BaseConfigurationDeclarer<SpoutDeclarer> implements SpoutDeclarer {
- SpoutComponent _component;
-
- public SpoutDeclarerImpl(SpoutComponent component) {
- _component = component;
- }
-
- @Override
- public SpoutDeclarer addConfigurations(Map conf) {
- _component.componentConfs.add(conf);
- return this;
- }
- }
-
- private class BoltDeclarerImpl extends BaseConfigurationDeclarer<BoltDeclarer> implements BoltDeclarer {
- Component _component;
-
- public BoltDeclarerImpl(Component component) {
- _component = component;
- }
-
- @Override
- public BoltDeclarer fieldsGrouping(final String component, final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.fieldsGrouping(component, fields);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer fieldsGrouping(final String component, final String streamId, final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.fieldsGrouping(component, streamId, fields);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer globalGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.globalGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer globalGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.globalGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer shuffleGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.shuffleGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer shuffleGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.shuffleGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localOrShuffleGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localOrShuffleGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localFirstGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localFirstGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer noneGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.noneGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer noneGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.noneGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer allGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.allGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer allGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.allGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer directGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.directGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer directGrouping(final String component, final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.directGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(final String component, final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.customGrouping(component, grouping);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return null;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(final String component, final String streamId, final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.customGrouping(component, streamId, grouping);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
-
- @Override
- public String getStream() {
- return streamId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer grouping(final GlobalStreamId stream, final Grouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.grouping(stream, grouping);
- }
-
- @Override
- public String getComponent() {
- return stream.get_componentId();
- }
-
- @Override
- public String getStream() {
- return stream.get_streamId();
- }
- });
- return this;
- }
-
- private void addDeclaration(InputDeclaration declaration) {
- _component.declarations.add(declaration);
- }
-
- @Override
- public BoltDeclarer addConfigurations(Map conf) {
- _component.componentConfs.add(conf);
- return this;
- }
-
-
- }
-}
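One detail worth calling out in the removed builder: coordination components are wired up purely by naming convention, with a "$mastercoord-<batchGroup>" spout per batch group and a "$spoutcoord-<spoutId>" bolt per transactional spout. A tiny, hedged illustration of the two public helpers; the wrapper class is hypothetical.

    import storm.trident.topology.TridentTopologyBuilder;

    public class CoordinatorNamingSketch {
        public static void main(String[] args) {
            // spoutCoordinator(...) prepends the "$spoutcoord-" prefix ...
            String coordId = TridentTopologyBuilder.spoutCoordinator("word-spout");
            System.out.println(coordId);   // $spoutcoord-word-spout

            // ... and spoutIdFromCoordinatorId(...) strips it again.
            System.out.println(TridentTopologyBuilder.spoutIdFromCoordinatorId(coordId));   // word-spout
        }
    }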
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/state/RotatingTransactionalState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/state/RotatingTransactionalState.java b/jstorm-client/src/main/java/storm/trident/topology/state/RotatingTransactionalState.java
deleted file mode 100644
index 9f22cc7..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/state/RotatingTransactionalState.java
+++ /dev/null
@@ -1,130 +0,0 @@
-package storm.trident.topology.state;
-
-import backtype.storm.utils.Utils;
-import org.apache.zookeeper.KeeperException;
-
-import java.util.HashSet;
-import java.util.List;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-public class RotatingTransactionalState {
- public static interface StateInitializer {
- Object init(long txid, Object lastState);
- }
-
- private TransactionalState _state;
- private String _subdir;
-
- private TreeMap<Long, Object> _curr = new TreeMap<Long, Object>();
-
- public RotatingTransactionalState(TransactionalState state, String subdir) {
- _state = state;
- _subdir = subdir;
- state.mkdir(subdir);
- sync();
- }
-
-
- public Object getLastState() {
- if(_curr.isEmpty()) return null;
- else return _curr.lastEntry().getValue();
- }
-
- public void overrideState(long txid, Object state) {
- _state.setData(txPath(txid), state);
- _curr.put(txid, state);
- }
-
- public void removeState(long txid) {
- if(_curr.containsKey(txid)) {
- _curr.remove(txid);
- _state.delete(txPath(txid));
- }
- }
-
- public Object getState(long txid) {
- return _curr.get(txid);
- }
-
- public Object getState(long txid, StateInitializer init) {
- if(!_curr.containsKey(txid)) {
- SortedMap<Long, Object> prevMap = _curr.headMap(txid);
- SortedMap<Long, Object> afterMap = _curr.tailMap(txid);
-
- Long prev = null;
- if(!prevMap.isEmpty()) prev = prevMap.lastKey();
-
- Object data;
- if(afterMap.isEmpty()) {
- Object prevData;
- if(prev!=null) {
- prevData = _curr.get(prev);
- } else {
- prevData = null;
- }
- data = init.init(txid, prevData);
- } else {
- data = null;
- }
- _curr.put(txid, data);
- _state.setData(txPath(txid), data);
- }
- return _curr.get(txid);
- }
-
- public Object getPreviousState(long txid) {
- SortedMap<Long, Object> prevMap = _curr.headMap(txid);
- if(prevMap.isEmpty()) return null;
- else return prevMap.get(prevMap.lastKey());
- }
-
- public boolean hasCache(long txid) {
- return _curr.containsKey(txid);
- }
-
- /**
- * Returns null if the state was newly created; otherwise returns the existing value.
- */
- public Object getStateOrCreate(long txid, StateInitializer init) {
- if(_curr.containsKey(txid)) {
- return _curr.get(txid);
- } else {
- getState(txid, init);
- return null;
- }
- }
-
- public void cleanupBefore(long txid) {
- SortedMap<Long, Object> toDelete = _curr.headMap(txid);
- for(long tx: new HashSet<Long>(toDelete.keySet())) {
- _curr.remove(tx);
- try {
- _state.delete(txPath(tx));
- } catch(RuntimeException e) {
- // Ignore NoNodeExists exceptions because when sync() it may populate _curr with stale data since
- // zookeeper reads are eventually consistent.
- if(!Utils.exceptionCauseIsInstanceOf(KeeperException.NoNodeException.class, e)) {
- throw e;
- }
- }
- }
- }
-
- private void sync() {
- List<String> txids = _state.list(_subdir);
- for(String txid_s: txids) {
- Object data = _state.getData(txPath(txid_s));
- _curr.put(Long.parseLong(txid_s), data);
- }
- }
-
- private String txPath(long tx) {
- return txPath("" + tx);
- }
-
- private String txPath(String tx) {
- return _subdir + "/" + tx;
- }
-
-}
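
A minimal sketch of driving the RotatingTransactionalState removed above. It assumes a reachable ZooKeeper configured through the usual transactional.zookeeper.* (or storm.zookeeper.*) settings; the topology id "wordcount" and subdir "meta" are illustrative only:

    import java.util.Map;
    import backtype.storm.utils.Utils;
    import storm.trident.topology.state.RotatingTransactionalState;
    import storm.trident.topology.state.TransactionalState;

    public class RotatingStateSketch {
        public static void main(String[] args) {
            Map conf = Utils.readStormConfig();
            TransactionalState zk = TransactionalState.newCoordinatorState(conf, "wordcount");
            RotatingTransactionalState rotating = new RotatingTransactionalState(zk, "meta");

            // Create (or fetch) the metadata for txid 42, seeding it from the previous transaction's state.
            Object meta = rotating.getState(42L, new RotatingTransactionalState.StateInitializer() {
                public Object init(long txid, Object lastState) {
                    return lastState == null ? "first-batch" : lastState;
                }
            });
            System.out.println(meta);
            rotating.cleanupBefore(42L);   // drop state for all transactions older than 42
            zk.close();
        }
    }
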
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/state/TransactionalState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/state/TransactionalState.java b/jstorm-client/src/main/java/storm/trident/topology/state/TransactionalState.java
deleted file mode 100644
index 44d4282..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/state/TransactionalState.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package storm.trident.topology.state;
-
-
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-
-import backtype.storm.Config;
-import backtype.storm.utils.Utils;
-
-public class TransactionalState {
- CuratorFramework _curator;
-
- public static TransactionalState newUserState(Map conf, String id) {
- return new TransactionalState(conf, id, "user");
- }
-
- public static TransactionalState newCoordinatorState(Map conf, String id) {
- return new TransactionalState(conf, id, "coordinator");
- }
-
- protected TransactionalState(Map conf, String id, String subroot) {
- try {
- conf = new HashMap(conf);
- String rootDir = conf.get(Config.TRANSACTIONAL_ZOOKEEPER_ROOT) + "/" + id + "/" + subroot;
- List<String> servers = (List<String>) getWithBackup(conf, Config.TRANSACTIONAL_ZOOKEEPER_SERVERS, Config.STORM_ZOOKEEPER_SERVERS);
- Object port = getWithBackup(conf, Config.TRANSACTIONAL_ZOOKEEPER_PORT, Config.STORM_ZOOKEEPER_PORT);
- CuratorFramework initter = Utils.newCuratorStarted(conf, servers, port);
- try {
- initter.create().creatingParentsIfNeeded().forPath(rootDir);
- } catch(KeeperException.NodeExistsException e) {
-
- }
-
- initter.close();
-
- _curator = Utils.newCuratorStarted(conf, servers, port, rootDir);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void setData(String path, Object obj) {
- path = "/" + path;
- byte[] ser;
- try {
- ser = Utils.to_json(obj).getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- try {
- if(_curator.checkExists().forPath(path)!=null) {
- _curator.setData().forPath(path, ser);
- } else {
- _curator.create()
- .creatingParentsIfNeeded()
- .withMode(CreateMode.PERSISTENT)
- .forPath(path, ser);
- }
- } catch(Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void delete(String path) {
- path = "/" + path;
- try {
- _curator.delete().forPath(path);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public List<String> list(String path) {
- path = "/" + path;
- try {
- if(_curator.checkExists().forPath(path)==null) {
- return new ArrayList<String>();
- } else {
- return _curator.getChildren().forPath(path);
- }
- } catch(Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void mkdir(String path) {
- setData(path, 7);
- }
-
- public Object getData(String path) {
- path = "/" + path;
- try {
- if(_curator.checkExists().forPath(path)!=null) {
- return Utils.from_json(new String(_curator.getData().forPath(path), "UTF-8"));
- } else {
- return null;
- }
- } catch(Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void close() {
- _curator.close();
- }
-
- private Object getWithBackup(Map amap, Object primary, Object backup) {
- Object ret = amap.get(primary);
- if(ret==null) return amap.get(backup);
- return ret;
- }
-}
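
For reference, a small sketch of the TransactionalState API removed above, which is a thin JSON-over-ZooKeeper key/value layer. It assumes a reachable ZooKeeper; the id and paths are illustrative:

    import java.util.Arrays;
    import java.util.Map;
    import backtype.storm.utils.Utils;
    import storm.trident.topology.state.TransactionalState;

    public class TransactionalStateSketch {
        public static void main(String[] args) {
            Map conf = Utils.readStormConfig();
            TransactionalState state = TransactionalState.newUserState(conf, "wordcount");
            state.setData("counters/spout-0", Arrays.asList(1, 2, 3));   // serialized to JSON under the topology root
            System.out.println(state.list("counters"));                  // [spout-0]
            System.out.println(state.getData("counters/spout-0"));       // [1, 2, 3]
            state.delete("counters/spout-0");
            state.close();
        }
    }
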
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/tuple/ComboList.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/tuple/ComboList.java b/jstorm-client/src/main/java/storm/trident/tuple/ComboList.java
deleted file mode 100644
index 0221579..0000000
--- a/jstorm-client/src/main/java/storm/trident/tuple/ComboList.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package storm.trident.tuple;
-
-import java.io.Serializable;
-import java.util.AbstractList;
-import java.util.List;
-import org.apache.commons.lang.builder.ToStringBuilder;
-
-
-public class ComboList extends AbstractList<Object> {
- public static class Factory implements Serializable {
- Pointer[] index;
- int[] sizes;
-
- public Factory(int... sizes) {
- this.sizes = sizes;
- int total = 0;
- for(int size: sizes) {
- total+=size;
- }
- index = new Pointer[total];
- int i=0;
- int j=0;
- for(int size: sizes) {
- for(int z=0; z<size; z++) {
- index[j] = new Pointer(i, z);
- j++;
- }
- i++;
- }
- }
-
- public ComboList create(List[] delegates) {
- if(delegates.length!=sizes.length) {
- throw new RuntimeException("Expected " + sizes.length + " lists, but instead got " + delegates.length + " lists");
- }
- for(int i=0; i<delegates.length; i++) {
- List l = delegates[i];
- if(l==null || l.size() != sizes[i]) {
- throw new RuntimeException("Got unexpected delegates to ComboList: " + ToStringBuilder.reflectionToString(delegates));
- }
- }
- return new ComboList(delegates, index);
- }
- }
-
- private static class Pointer implements Serializable {
- int listIndex;
- int subIndex;
-
- public Pointer(int listIndex, int subIndex) {
- this.listIndex = listIndex;
- this.subIndex = subIndex;
- }
-
- }
-
- Pointer[] _index;
- List[] _delegates;
-
- public ComboList(List[] delegates, Pointer[] index) {
- _index = index;
- _delegates = delegates;
- }
-
- @Override
- public Object get(int i) {
- Pointer ptr = _index[i];
- return _delegates[ptr.listIndex].get(ptr.subIndex);
- }
-
- @Override
- public int size() {
- return _index.length;
- }
-}
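
The ComboList above presents several backing lists as one flat read-only list. A self-contained sketch of the Factory contract, where the delegate list sizes must match the sizes given to the factory:

    import java.util.Arrays;
    import java.util.List;
    import storm.trident.tuple.ComboList;

    public class ComboListSketch {
        public static void main(String[] args) {
            // One 2-element list and one 3-element list are exposed as a single 5-element view.
            ComboList.Factory factory = new ComboList.Factory(2, 3);
            List[] delegates = new List[] { Arrays.asList("a", "b"), Arrays.asList(1, 2, 3) };
            ComboList combined = factory.create(delegates);
            System.out.println(combined);        // [a, b, 1, 2, 3]
            System.out.println(combined.get(3)); // 2
        }
    }
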
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/tuple/ConsList.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/tuple/ConsList.java b/jstorm-client/src/main/java/storm/trident/tuple/ConsList.java
deleted file mode 100644
index 72fd3d3..0000000
--- a/jstorm-client/src/main/java/storm/trident/tuple/ConsList.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package storm.trident.tuple;
-
-import java.util.AbstractList;
-import java.util.List;
-
-public class ConsList extends AbstractList<Object> {
- List<Object> _elems;
- Object _first;
-
- public ConsList(Object o, List<Object> elems) {
- _elems = elems;
- _first = o;
- }
-
- @Override
- public Object get(int i) {
- if(i==0) return _first;
- else {
- return _elems.get(i - 1);
- }
- }
-
- @Override
- public int size() {
- return _elems.size() + 1;
- }
-}
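
ConsList is the matching trick for prepending a single element without copying the underlying list; a short self-contained sketch:

    import java.util.Arrays;
    import storm.trident.tuple.ConsList;

    public class ConsListSketch {
        public static void main(String[] args) {
            // Prepend a header value to an existing list; the wrapped list is not copied.
            ConsList withHead = new ConsList("txid", Arrays.<Object>asList("f1", "f2"));
            System.out.println(withHead.size()); // 3
            System.out.println(withHead.get(0)); // txid
            System.out.println(withHead);        // [txid, f1, f2]
        }
    }
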
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/tuple/TridentTuple.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/tuple/TridentTuple.java b/jstorm-client/src/main/java/storm/trident/tuple/TridentTuple.java
deleted file mode 100644
index 9159ce7..0000000
--- a/jstorm-client/src/main/java/storm/trident/tuple/TridentTuple.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package storm.trident.tuple;
-
-import backtype.storm.tuple.ITuple;
-
-import java.io.Serializable;
-import java.util.List;
-import java.util.Map;
-
-public interface TridentTuple extends ITuple, List<Object> {
-
- public static interface Factory extends Serializable {
- Map<String, ValuePointer> getFieldIndex();
- List<String> getOutputFields();
- int numDelegates();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/tuple/TridentTupleView.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/tuple/TridentTupleView.java b/jstorm-client/src/main/java/storm/trident/tuple/TridentTupleView.java
deleted file mode 100644
index 17f3e3f..0000000
--- a/jstorm-client/src/main/java/storm/trident/tuple/TridentTupleView.java
+++ /dev/null
@@ -1,342 +0,0 @@
-package storm.trident.tuple;
-
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import clojure.lang.IPersistentVector;
-import clojure.lang.PersistentVector;
-import clojure.lang.RT;
-import java.util.AbstractList;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.Arrays;
-
-//extends abstractlist so that it can be emitted directly as Storm tuples
-public class TridentTupleView extends AbstractList<Object> implements TridentTuple {
- ValuePointer[] _index;
- Map<String, ValuePointer> _fieldIndex;
- IPersistentVector _delegates;
-
- public static class ProjectionFactory implements Factory {
- Map<String, ValuePointer> _fieldIndex;
- ValuePointer[] _index;
- Factory _parent;
-
- public ProjectionFactory(Factory parent, Fields projectFields) {
- _parent = parent;
- if(projectFields==null) projectFields = new Fields();
- Map<String, ValuePointer> parentFieldIndex = parent.getFieldIndex();
- _fieldIndex = new HashMap<String, ValuePointer>();
- for(String f: projectFields) {
- _fieldIndex.put(f, parentFieldIndex.get(f));
- }
- _index = ValuePointer.buildIndex(projectFields, _fieldIndex);
- }
-
- public TridentTuple create(TridentTuple parent) {
- if(_index.length==0) return EMPTY_TUPLE;
- else return new TridentTupleView(((TridentTupleView)parent)._delegates, _index, _fieldIndex);
- }
-
- @Override
- public Map<String, ValuePointer> getFieldIndex() {
- return _fieldIndex;
- }
-
- @Override
- public int numDelegates() {
- return _parent.numDelegates();
- }
-
- @Override
- public List<String> getOutputFields() {
- return indexToFieldsList(_index);
- }
- }
-
- public static class FreshOutputFactory implements Factory {
- Map<String, ValuePointer> _fieldIndex;
- ValuePointer[] _index;
-
- public FreshOutputFactory(Fields selfFields) {
- _fieldIndex = new HashMap<String, ValuePointer>();
- for(int i=0; i<selfFields.size(); i++) {
- String field = selfFields.get(i);
- _fieldIndex.put(field, new ValuePointer(0, i, field));
- }
- _index = ValuePointer.buildIndex(selfFields, _fieldIndex);
- }
-
- public TridentTuple create(List<Object> selfVals) {
- return new TridentTupleView(PersistentVector.EMPTY.cons(selfVals), _index, _fieldIndex);
- }
-
- @Override
- public Map<String, ValuePointer> getFieldIndex() {
- return _fieldIndex;
- }
-
- @Override
- public int numDelegates() {
- return 1;
- }
-
- @Override
- public List<String> getOutputFields() {
- return indexToFieldsList(_index);
- }
- }
-
- public static class OperationOutputFactory implements Factory {
- Map<String, ValuePointer> _fieldIndex;
- ValuePointer[] _index;
- Factory _parent;
-
- public OperationOutputFactory(Factory parent, Fields selfFields) {
- _parent = parent;
- _fieldIndex = new HashMap(parent.getFieldIndex());
- int myIndex = parent.numDelegates();
- for(int i=0; i<selfFields.size(); i++) {
- String field = selfFields.get(i);
- _fieldIndex.put(field, new ValuePointer(myIndex, i, field));
- }
- List<String> myOrder = new ArrayList<String>(parent.getOutputFields());
-
- Set<String> parentFieldsSet = new HashSet<String>(myOrder);
- for(String f: selfFields) {
- if(parentFieldsSet.contains(f)) {
- throw new IllegalArgumentException(
- "Additive operations cannot add fields with same name as already exists. "
- + "Tried adding " + selfFields + " to " + parent.getOutputFields());
- }
- myOrder.add(f);
- }
-
- _index = ValuePointer.buildIndex(new Fields(myOrder), _fieldIndex);
- }
-
- public TridentTuple create(TridentTupleView parent, List<Object> selfVals) {
- IPersistentVector curr = parent._delegates;
- curr = (IPersistentVector) RT.conj(curr, selfVals);
- return new TridentTupleView(curr, _index, _fieldIndex);
- }
-
- @Override
- public Map<String, ValuePointer> getFieldIndex() {
- return _fieldIndex;
- }
-
- @Override
- public int numDelegates() {
- return _parent.numDelegates() + 1;
- }
-
- @Override
- public List<String> getOutputFields() {
- return indexToFieldsList(_index);
- }
- }
-
- public static class RootFactory implements Factory {
- ValuePointer[] index;
- Map<String, ValuePointer> fieldIndex;
-
- public RootFactory(Fields inputFields) {
- index = new ValuePointer[inputFields.size()];
- int i=0;
- for(String f: inputFields) {
- index[i] = new ValuePointer(0, i, f);
- i++;
- }
- fieldIndex = ValuePointer.buildFieldIndex(index);
- }
-
- public TridentTuple create(Tuple parent) {
- return new TridentTupleView(PersistentVector.EMPTY.cons(parent.getValues()), index, fieldIndex);
- }
-
- @Override
- public Map<String, ValuePointer> getFieldIndex() {
- return fieldIndex;
- }
-
- @Override
- public int numDelegates() {
- return 1;
- }
-
- @Override
- public List<String> getOutputFields() {
- return indexToFieldsList(this.index);
- }
- }
-
- private static List<String> indexToFieldsList(ValuePointer[] index) {
- List<String> ret = new ArrayList<String>();
- for(ValuePointer p: index) {
- ret.add(p.field);
- }
- return ret;
- }
-
- public static TridentTupleView EMPTY_TUPLE = new TridentTupleView(null, new ValuePointer[0], new HashMap());
-
- // index and fieldIndex are precomputed, delegates built up over many operations using persistent data structures
- public TridentTupleView(IPersistentVector delegates, ValuePointer[] index, Map<String, ValuePointer> fieldIndex) {
- _delegates = delegates;
- _index = index;
- _fieldIndex = fieldIndex;
- }
-
- public static TridentTuple createFreshTuple(Fields fields, List<Object> values) {
- FreshOutputFactory factory = new FreshOutputFactory(fields);
- return factory.create(values);
- }
-
- public static TridentTuple createFreshTuple(Fields fields, Object... values) {
- FreshOutputFactory factory = new FreshOutputFactory(fields);
- return factory.create(Arrays.asList(values));
- }
-
- @Override
- public List<Object> getValues() {
- return this;
- }
-
- @Override
- public int size() {
- return _index.length;
- }
-
- @Override
- public boolean contains(String field) {
- return getFields().contains(field);
- }
-
- @Override
- public Fields getFields() {
- return new Fields(indexToFieldsList(_index));
- }
-
- @Override
- public int fieldIndex(String field) {
- return getFields().fieldIndex(field);
- }
-
- @Override
- public List<Object> select(Fields selector) {
- return getFields().select(selector, getValues());
- }
-
- @Override
- public Object get(int i) {
- return getValue(i);
- }
-
- @Override
- public Object getValue(int i) {
- return getValueByPointer(_index[i]);
- }
-
- @Override
- public String getString(int i) {
- return (String) getValue(i);
- }
-
- @Override
- public Integer getInteger(int i) {
- return (Integer) getValue(i);
- }
-
- @Override
- public Long getLong(int i) {
- return (Long) getValue(i);
- }
-
- @Override
- public Boolean getBoolean(int i) {
- return (Boolean) getValue(i);
- }
-
- @Override
- public Short getShort(int i) {
- return (Short) getValue(i);
- }
-
- @Override
- public Byte getByte(int i) {
- return (Byte) getValue(i);
- }
-
- @Override
- public Double getDouble(int i) {
- return (Double) getValue(i);
- }
-
- @Override
- public Float getFloat(int i) {
- return (Float) getValue(i);
- }
-
- @Override
- public byte[] getBinary(int i) {
- return (byte[]) getValue(i);
- }
-
- @Override
- public Object getValueByField(String field) {
- return getValueByPointer(_fieldIndex.get(field));
- }
-
- @Override
- public String getStringByField(String field) {
- return (String) getValueByField(field);
- }
-
- @Override
- public Integer getIntegerByField(String field) {
- return (Integer) getValueByField(field);
- }
-
- @Override
- public Long getLongByField(String field) {
- return (Long) getValueByField(field);
- }
-
- @Override
- public Boolean getBooleanByField(String field) {
- return (Boolean) getValueByField(field);
- }
-
- @Override
- public Short getShortByField(String field) {
- return (Short) getValueByField(field);
- }
-
- @Override
- public Byte getByteByField(String field) {
- return (Byte) getValueByField(field);
- }
-
- @Override
- public Double getDoubleByField(String field) {
- return (Double) getValueByField(field);
- }
-
- @Override
- public Float getFloatByField(String field) {
- return (Float) getValueByField(field);
- }
-
- @Override
- public byte[] getBinaryByField(String field) {
- return (byte[]) getValueByField(field);
- }
-
- private Object getValueByPointer(ValuePointer ptr) {
- return ((List<Object>)_delegates.nth(ptr.delegateIndex)).get(ptr.index);
- }
-}
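
The createFreshTuple helpers above build standalone tuples outside a running topology, which is how the class is most easily exercised; a minimal sketch:

    import backtype.storm.tuple.Fields;
    import storm.trident.tuple.TridentTuple;
    import storm.trident.tuple.TridentTupleView;

    public class TridentTupleSketch {
        public static void main(String[] args) {
            // Build a tuple with named fields, then read values by field and positionally.
            TridentTuple t = TridentTupleView.createFreshTuple(new Fields("word", "count"), "storm", 3);
            System.out.println(t.getStringByField("word"));    // storm
            System.out.println(t.getIntegerByField("count"));  // 3
            System.out.println(t.getValues());                 // [storm, 3]
        }
    }
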
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/tuple/ValuePointer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/tuple/ValuePointer.java b/jstorm-client/src/main/java/storm/trident/tuple/ValuePointer.java
deleted file mode 100644
index 401261e..0000000
--- a/jstorm-client/src/main/java/storm/trident/tuple/ValuePointer.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package storm.trident.tuple;
-
-import backtype.storm.tuple.Fields;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.apache.commons.lang.builder.ToStringBuilder;
-
-public class ValuePointer {
- public static Map<String, ValuePointer> buildFieldIndex(ValuePointer[] pointers) {
- Map<String, ValuePointer> ret = new HashMap<String, ValuePointer>();
- for(ValuePointer ptr: pointers) {
- ret.put(ptr.field, ptr);
- }
- return ret;
- }
-
- public static ValuePointer[] buildIndex(Fields fieldsOrder, Map<String, ValuePointer> pointers) {
- if(fieldsOrder.size()!=pointers.size()) {
- throw new IllegalArgumentException("Fields order must be same length as pointers map");
- }
- ValuePointer[] ret = new ValuePointer[pointers.size()];
- for(int i=0; i<fieldsOrder.size(); i++) {
- ret[i] = pointers.get(fieldsOrder.get(i));
- }
- return ret;
- }
-
- public int delegateIndex;
- protected int index;
- protected String field;
-
- public ValuePointer(int delegateIndex, int index, String field) {
- this.delegateIndex = delegateIndex;
- this.index = index;
- this.field = field;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/util/ErrorEdgeFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/util/ErrorEdgeFactory.java b/jstorm-client/src/main/java/storm/trident/util/ErrorEdgeFactory.java
deleted file mode 100644
index 02cff2a..0000000
--- a/jstorm-client/src/main/java/storm/trident/util/ErrorEdgeFactory.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package storm.trident.util;
-
-import java.io.Serializable;
-import org.jgrapht.EdgeFactory;
-
-public class ErrorEdgeFactory implements EdgeFactory, Serializable {
- @Override
- public Object createEdge(Object v, Object v1) {
- throw new RuntimeException("Edges should be made explicitly");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/util/IndexedEdge.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/util/IndexedEdge.java b/jstorm-client/src/main/java/storm/trident/util/IndexedEdge.java
deleted file mode 100644
index 6201978..0000000
--- a/jstorm-client/src/main/java/storm/trident/util/IndexedEdge.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package storm.trident.util;
-
-import java.io.Serializable;
-
-public class IndexedEdge<T> implements Comparable, Serializable {
- public T source;
- public T target;
- public int index;
-
- public IndexedEdge(T source, T target, int index) {
- this.source = source;
- this.target = target;
- this.index = index;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + index;
- result = prime * result + ((source == null) ? 0 : source.hashCode());
- result = prime * result + ((target == null) ? 0 : target.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- IndexedEdge other = (IndexedEdge) obj;
- if (index != other.index)
- return false;
- if (source == null) {
- if (other.source != null)
- return false;
- } else if (!source.equals(other.source))
- return false;
- if (target == null) {
- if (other.target != null)
- return false;
- } else if (!target.equals(other.target))
- return false;
- return true;
- }
-
- @Override
- public int compareTo(Object t) {
- IndexedEdge other = (IndexedEdge) t;
- return index - other.index;
- }
-}
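
IndexedEdge attaches an explicit ordering index to graph edges so that helpers like TridentUtils.getParents (further below) can return a node's parents in a deterministic order; a short sketch of that ordering:

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.List;
    import storm.trident.util.IndexedEdge;

    public class IndexedEdgeSketch {
        public static void main(String[] args) {
            List<IndexedEdge> edges = new ArrayList<IndexedEdge>();
            edges.add(new IndexedEdge<String>("filter", "join", 1));
            edges.add(new IndexedEdge<String>("spout", "join", 0));
            Collections.sort(edges);                  // compareTo sorts by the explicit index
            System.out.println(edges.get(0).source);  // spout
        }
    }
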
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/util/LRUMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/util/LRUMap.java b/jstorm-client/src/main/java/storm/trident/util/LRUMap.java
deleted file mode 100644
index 8d1a9a3..0000000
--- a/jstorm-client/src/main/java/storm/trident/util/LRUMap.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package storm.trident.util;
-
-import java.util.LinkedHashMap;
-import java.util.Map;
-
-public class LRUMap<A, B> extends LinkedHashMap<A, B> {
- private int _maxSize;
-
- public LRUMap(int maxSize) {
- super(maxSize + 1, 1.0f, true);
- _maxSize = maxSize;
- }
-
- @Override
- protected boolean removeEldestEntry(final Map.Entry<A, B> eldest) {
- return size() > _maxSize;
- }
-}
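
LRUMap is a thin access-ordered LinkedHashMap that evicts its eldest entry once the map grows past maxSize; a self-contained sketch of the eviction behaviour:

    import storm.trident.util.LRUMap;

    public class LRUMapSketch {
        public static void main(String[] args) {
            LRUMap<String, Integer> cache = new LRUMap<String, Integer>(2);
            cache.put("a", 1);
            cache.put("b", 2);
            cache.get("a");        // touch "a" so "b" becomes the eldest entry
            cache.put("c", 3);     // exceeds maxSize, evicting "b"
            System.out.println(cache.keySet()); // [a, c]
        }
    }

The access-order flag passed to the super constructor (true) is what makes this least-recently-used rather than simple insertion-order eviction.
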
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/util/TridentUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/util/TridentUtils.java b/jstorm-client/src/main/java/storm/trident/util/TridentUtils.java
deleted file mode 100644
index 0059721..0000000
--- a/jstorm-client/src/main/java/storm/trident/util/TridentUtils.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package storm.trident.util;
-
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.thrift7.TBase;
-import org.apache.thrift7.TDeserializer;
-import org.apache.thrift7.TException;
-import org.apache.thrift7.TSerializer;
-import org.jgrapht.DirectedGraph;
-
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.topology.IComponent;
-import backtype.storm.topology.OutputFieldsGetter;
-import backtype.storm.tuple.Fields;
-
-public class TridentUtils {
- public static Fields fieldsUnion(Fields... fields) {
- Set<String> ret = new HashSet<String>();
- for(Fields f: fields) {
- if(f!=null) ret.addAll(f.toList());
- }
- return new Fields(new ArrayList<String>(ret));
- }
-
- public static Fields fieldsConcat(Fields... fields) {
- List<String> ret = new ArrayList<String>();
- for(Fields f: fields) {
- if(f!=null) ret.addAll(f.toList());
- }
- return new Fields(ret);
- }
-
- public static Fields fieldsSubtract(Fields all, Fields minus) {
- Set<String> removeSet = new HashSet<String>(minus.toList());
- List<String> toKeep = new ArrayList<String>();
- for(String s: all.toList()) {
- if(!removeSet.contains(s)) {
- toKeep.add(s);
- }
- }
- return new Fields(toKeep);
- }
-
- public static Fields getSingleOutputStreamFields(IComponent component) {
- OutputFieldsGetter getter = new OutputFieldsGetter();
- component.declareOutputFields(getter);
- Map<String, StreamInfo> declaration = getter.getFieldsDeclaration();
- if(declaration.size()!=1) {
- throw new RuntimeException("Trident only supports components that emit a single stream");
- }
- StreamInfo si = declaration.values().iterator().next();
- if(si.is_direct()) {
- throw new RuntimeException("Trident does not support direct streams");
- }
- return new Fields(si.get_output_fields());
- }
-
- /**
- * Assumes edge contains an index
- */
- public static <T> List<T> getParents(DirectedGraph g, T n) {
- List<IndexedEdge> incoming = new ArrayList(g.incomingEdgesOf(n));
- Collections.sort(incoming);
- List<T> ret = new ArrayList();
- for(IndexedEdge e: incoming) {
- ret.add((T)e.source);
- }
- return ret;
- }
-
- public static <T> List<T> getChildren(DirectedGraph g, T n) {
- List<IndexedEdge> outgoing = new ArrayList(g.outgoingEdgesOf(n));
- List<T> ret = new ArrayList();
- for(IndexedEdge e: outgoing) {
- ret.add((T)e.target);
- }
- return ret;
- }
-
-
- public static <T> T getParent(DirectedGraph g, T n) {
- List<T> parents = getParents(g, n);
- if(parents.size()!=1) {
- throw new RuntimeException("Expected a single parent");
- }
- return parents.get(0);
- }
-
- private static ThreadLocal<TSerializer> threadSer = new ThreadLocal<TSerializer>();
- private static ThreadLocal<TDeserializer> threadDes = new ThreadLocal<TDeserializer>();
-
- public static byte[] thriftSerialize(TBase t) {
- try {
- TSerializer ser = threadSer.get();
- if (ser == null) {
- ser = new TSerializer();
- threadSer.set(ser);
- }
- return ser.serialize(t);
- } catch (TException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static <T> T thriftDeserialize(Class c, byte[] b) {
- try {
- T ret = (T) c.newInstance();
- TDeserializer des = threadDes.get();
- if (des == null) {
- des = new TDeserializer();
- threadDes.set(des);
- }
- des.deserialize((TBase) ret, b);
- return ret;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- }
-}
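
The Fields helpers above are pure functions and easy to exercise directly; a small sketch, with output comments assuming the field names shown:

    import backtype.storm.tuple.Fields;
    import storm.trident.util.TridentUtils;

    public class TridentUtilsSketch {
        public static void main(String[] args) {
            Fields left = new Fields("word", "count");
            Fields right = new Fields("ts");
            System.out.println(TridentUtils.fieldsConcat(left, right).toList());                // [word, count, ts]
            System.out.println(TridentUtils.fieldsSubtract(left, new Fields("count")).toList()); // [word]
            System.out.println(TridentUtils.fieldsUnion(left, right).size());                    // 3 (order unspecified)
        }
    }
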
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/__init__.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/__init__.py b/jstorm-client/src/main/py/__init__.py
deleted file mode 100644
index e69de29..0000000
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/DistributedRPC-remote
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/DistributedRPC-remote b/jstorm-client/src/main/py/storm/DistributedRPC-remote
deleted file mode 100644
index 9b7ebd8..0000000
--- a/jstorm-client/src/main/py/storm/DistributedRPC-remote
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-import sys
-import pprint
-from urlparse import urlparse
-from thrift.transport import TTransport
-from thrift.transport import TSocket
-from thrift.transport import THttpClient
-from thrift.protocol import TBinaryProtocol
-
-import DistributedRPC
-from ttypes import *
-
-if len(sys.argv) <= 1 or sys.argv[1] == '--help':
- print ''
- print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
- print ''
- print 'Functions:'
- print ' string execute(string functionName, string funcArgs)'
- print ''
- sys.exit(0)
-
-pp = pprint.PrettyPrinter(indent = 2)
-host = 'localhost'
-port = 9090
-uri = ''
-framed = False
-http = False
-argi = 1
-
-if sys.argv[argi] == '-h':
- parts = sys.argv[argi+1].split(':')
- host = parts[0]
- port = int(parts[1])
- argi += 2
-
-if sys.argv[argi] == '-u':
- url = urlparse(sys.argv[argi+1])
- parts = url[1].split(':')
- host = parts[0]
- if len(parts) > 1:
- port = int(parts[1])
- else:
- port = 80
- uri = url[2]
- if url[4]:
- uri += '?%s' % url[4]
- http = True
- argi += 2
-
-if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
- framed = True
- argi += 1
-
-cmd = sys.argv[argi]
-args = sys.argv[argi+1:]
-
-if http:
- transport = THttpClient.THttpClient(host, port, uri)
-else:
- socket = TSocket.TSocket(host, port)
- if framed:
- transport = TTransport.TFramedTransport(socket)
- else:
- transport = TTransport.TBufferedTransport(socket)
-protocol = TBinaryProtocol.TBinaryProtocol(transport)
-client = DistributedRPC.Client(protocol)
-transport.open()
-
-if cmd == 'execute':
- if len(args) != 2:
- print 'execute requires 2 args'
- sys.exit(1)
- pp.pprint(client.execute(args[0],args[1],))
-
-else:
- print 'Unrecognized method %s' % cmd
- sys.exit(1)
-
-transport.close()
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/DistributedRPC.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/DistributedRPC.py b/jstorm-client/src/main/py/storm/DistributedRPC.py
deleted file mode 100644
index a7e6ef9..0000000
--- a/jstorm-client/src/main/py/storm/DistributedRPC.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-from thrift.Thrift import *
-from ttypes import *
-from thrift.Thrift import TProcessor
-from thrift.transport import TTransport
-from thrift.protocol import TBinaryProtocol, TProtocol
-try:
- from thrift.protocol import fastbinary
-except:
- fastbinary = None
-
-
-class Iface:
- def execute(self, functionName, funcArgs):
- """
- Parameters:
- - functionName
- - funcArgs
- """
- pass
-
-
-class Client(Iface):
- def __init__(self, iprot, oprot=None):
- self._iprot = self._oprot = iprot
- if oprot is not None:
- self._oprot = oprot
- self._seqid = 0
-
- def execute(self, functionName, funcArgs):
- """
- Parameters:
- - functionName
- - funcArgs
- """
- self.send_execute(functionName, funcArgs)
- return self.recv_execute()
-
- def send_execute(self, functionName, funcArgs):
- self._oprot.writeMessageBegin('execute', TMessageType.CALL, self._seqid)
- args = execute_args()
- args.functionName = functionName
- args.funcArgs = funcArgs
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_execute(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = execute_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "execute failed: unknown result");
-
-
-class Processor(Iface, TProcessor):
- def __init__(self, handler):
- self._handler = handler
- self._processMap = {}
- self._processMap["execute"] = Processor.process_execute
-
- def process(self, iprot, oprot):
- (name, type, seqid) = iprot.readMessageBegin()
- if name not in self._processMap:
- iprot.skip(TType.STRUCT)
- iprot.readMessageEnd()
- x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
- oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
- x.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
- return
- else:
- self._processMap[name](self, seqid, iprot, oprot)
- return True
-
- def process_execute(self, seqid, iprot, oprot):
- args = execute_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = execute_result()
- try:
- result.success = self._handler.execute(args.functionName, args.funcArgs)
- except DRPCExecutionException, e:
- result.e = e
- oprot.writeMessageBegin("execute", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
-
-# HELPER FUNCTIONS AND STRUCTURES
-
-class execute_args:
- """
- Attributes:
- - functionName
- - funcArgs
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'functionName', None, None, ), # 1
- (2, TType.STRING, 'funcArgs', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.functionName) + hash(self.funcArgs)
-
- def __init__(self, functionName=None, funcArgs=None,):
- self.functionName = functionName
- self.funcArgs = funcArgs
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.functionName = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.funcArgs = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('execute_args')
- if self.functionName is not None:
- oprot.writeFieldBegin('functionName', TType.STRING, 1)
- oprot.writeString(self.functionName.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.funcArgs is not None:
- oprot.writeFieldBegin('funcArgs', TType.STRING, 2)
- oprot.writeString(self.funcArgs.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class execute_result:
- """
- Attributes:
- - success
- - e
- """
-
- thrift_spec = (
- (0, TType.STRING, 'success', None, None, ), # 0
- (1, TType.STRUCT, 'e', (DRPCExecutionException, DRPCExecutionException.thrift_spec), None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.success) + hash(self.e)
-
- def __init__(self, success=None, e=None,):
- self.success = success
- self.e = e
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 0:
- if ftype == TType.STRING:
- self.success = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 1:
- if ftype == TType.STRUCT:
- self.e = DRPCExecutionException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('execute_result')
- if self.success is not None:
- oprot.writeFieldBegin('success', TType.STRING, 0)
- oprot.writeString(self.success.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/DistributedRPCInvocations-remote
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/DistributedRPCInvocations-remote b/jstorm-client/src/main/py/storm/DistributedRPCInvocations-remote
deleted file mode 100644
index 5235dfe..0000000
--- a/jstorm-client/src/main/py/storm/DistributedRPCInvocations-remote
+++ /dev/null
@@ -1,99 +0,0 @@
-#!/usr/bin/env python
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-import sys
-import pprint
-from urlparse import urlparse
-from thrift.transport import TTransport
-from thrift.transport import TSocket
-from thrift.transport import THttpClient
-from thrift.protocol import TBinaryProtocol
-
-import DistributedRPCInvocations
-from ttypes import *
-
-if len(sys.argv) <= 1 or sys.argv[1] == '--help':
- print ''
- print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
- print ''
- print 'Functions:'
- print ' void result(string id, string result)'
- print ' DRPCRequest fetchRequest(string functionName)'
- print ' void failRequest(string id)'
- print ''
- sys.exit(0)
-
-pp = pprint.PrettyPrinter(indent = 2)
-host = 'localhost'
-port = 9090
-uri = ''
-framed = False
-http = False
-argi = 1
-
-if sys.argv[argi] == '-h':
- parts = sys.argv[argi+1].split(':')
- host = parts[0]
- port = int(parts[1])
- argi += 2
-
-if sys.argv[argi] == '-u':
- url = urlparse(sys.argv[argi+1])
- parts = url[1].split(':')
- host = parts[0]
- if len(parts) > 1:
- port = int(parts[1])
- else:
- port = 80
- uri = url[2]
- if url[4]:
- uri += '?%s' % url[4]
- http = True
- argi += 2
-
-if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
- framed = True
- argi += 1
-
-cmd = sys.argv[argi]
-args = sys.argv[argi+1:]
-
-if http:
- transport = THttpClient.THttpClient(host, port, uri)
-else:
- socket = TSocket.TSocket(host, port)
- if framed:
- transport = TTransport.TFramedTransport(socket)
- else:
- transport = TTransport.TBufferedTransport(socket)
-protocol = TBinaryProtocol.TBinaryProtocol(transport)
-client = DistributedRPCInvocations.Client(protocol)
-transport.open()
-
-if cmd == 'result':
- if len(args) != 2:
- print 'result requires 2 args'
- sys.exit(1)
- pp.pprint(client.result(args[0],args[1],))
-
-elif cmd == 'fetchRequest':
- if len(args) != 1:
- print 'fetchRequest requires 1 args'
- sys.exit(1)
- pp.pprint(client.fetchRequest(args[0],))
-
-elif cmd == 'failRequest':
- if len(args) != 1:
- print 'failRequest requires 1 args'
- sys.exit(1)
- pp.pprint(client.failRequest(args[0],))
-
-else:
- print 'Unrecognized method %s' % cmd
- sys.exit(1)
-
-transport.close()
[14/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/constants.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/constants.py b/jstorm-client/src/main/py/storm/constants.py
deleted file mode 100644
index 732b368..0000000
--- a/jstorm-client/src/main/py/storm/constants.py
+++ /dev/null
@@ -1,9 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-from thrift.Thrift import *
-from ttypes import *
-
[52/60] [abbrv] storm git commit: removed jstorm-on-yarn subdirectory
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/AuthUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/AuthUtils.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/AuthUtils.java
deleted file mode 100644
index b7eecfd..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/AuthUtils.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package com.alibaba.jstorm.yarn.thrift;
-
-import backtype.storm.Config;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.AppConfigurationEntry;
-import java.security.NoSuchAlgorithmException;
-import java.security.URIParameter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.util.Map;
-
-public class AuthUtils {
- private static final Logger LOG = LoggerFactory.getLogger(AuthUtils.class);
- public static final String LOGIN_CONTEXT_SERVER = "StormServer";
- public static final String LOGIN_CONTEXT_CLIENT = "StormClient";
- public static final String SERVICE = "storm_thrift_server";
-
- /**
- * Construct a JAAS configuration object per storm configuration file
- * @param storm_conf Storm configuration
- * @return JAAS configuration object
- */
- public static Configuration GetConfiguration(Map storm_conf) {
- Configuration login_conf = null;
-
- //find login file configuration from Storm configuration
- String loginConfigurationFile = (String)storm_conf.get("java.security.auth.login.config");
- if ((loginConfigurationFile != null) && (loginConfigurationFile.length()>0)) {
- try {
- URI config_uri = new File(loginConfigurationFile).toURI();
- login_conf = Configuration.getInstance("JavaLoginConfig", new URIParameter(config_uri));
- } catch (NoSuchAlgorithmException ex1) {
- if (ex1.getCause() instanceof FileNotFoundException)
- throw new RuntimeException("configuration file "+loginConfigurationFile+" could not be found");
- else throw new RuntimeException(ex1);
- } catch (Exception ex2) {
- throw new RuntimeException(ex2);
- }
- }
-
- return login_conf;
- }
-
- /**
- * Construct a transport plugin per storm configuration
- * @param conf storm configuration
- * @return
- */
- public static ITransportPlugin GetTransportPlugin(Map storm_conf, Configuration login_conf) {
- ITransportPlugin transportPlugin = null;
- try {
- String transport_plugin_klassName = (String) storm_conf.get(Config.STORM_THRIFT_TRANSPORT_PLUGIN);
- Class klass = Class.forName(transport_plugin_klassName);
- transportPlugin = (ITransportPlugin)klass.newInstance();
- transportPlugin.prepare(storm_conf, login_conf);
- } catch(Exception e) {
- throw new RuntimeException(e);
- }
- return transportPlugin;
- }
-
- public static String get(Configuration configuration, String section, String key) throws IOException {
- AppConfigurationEntry configurationEntries[] = configuration.getAppConfigurationEntry(section);
- if (configurationEntries == null) {
- String errorMessage = "Could not find a '"+ section + "' entry in this configuration.";
- throw new IOException(errorMessage);
- }
-
- for(AppConfigurationEntry entry: configurationEntries) {
- Object val = entry.getOptions().get(key);
- if (val != null)
- return (String)val;
- }
- return null;
- }
-}
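
A minimal sketch of the AuthUtils helpers removed above. The JAAS file path and the keyTab option are hypothetical placeholders for whatever the cluster's login configuration actually defines:

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.auth.login.Configuration;
    import com.alibaba.jstorm.yarn.thrift.AuthUtils;

    public class AuthUtilsSketch {
        public static void main(String[] args) throws Exception {
            Map conf = new HashMap();
            conf.put("java.security.auth.login.config", "/etc/jstorm/jaas.conf"); // hypothetical JAAS file
            Configuration login = AuthUtils.GetConfiguration(conf);
            // Reads the "keyTab" option from the "StormClient" section, if the file defines one.
            String keyTab = AuthUtils.get(login, AuthUtils.LOGIN_CONTEXT_CLIENT, "keyTab");
            System.out.println(keyTab);
        }
    }
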
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ITransportPlugin.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ITransportPlugin.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ITransportPlugin.java
deleted file mode 100644
index 0c06bf1..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ITransportPlugin.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package com.alibaba.jstorm.yarn.thrift;
-
-import java.io.IOException;
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.server.TServer;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-
-/**
- * Interface for Thrift Transport plugin
- */
-public interface ITransportPlugin {
- /**
- * Invoked once immediately after construction
- * @param storm_conf Storm configuration
- * @param login_conf login configuration
- */
- void prepare(Map storm_conf, Configuration login_conf);
-
- /**
- * Create a server associated with a given port and service handler
- * @param port listening port
- * @param processor service handler
- * @return the server to be bound
- */
- public TServer getServer(int port, TProcessor processor) throws IOException, TTransportException;
-
- /**
- * Connect to the specified server via framed transport
- * @param transport The underlying Thrift transport.
- * @param serverHost server host
- */
- public TTransport connect(TTransport transport, String serverHost) throws IOException, TTransportException;
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ReqContext.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ReqContext.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ReqContext.java
deleted file mode 100644
index ae10096..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ReqContext.java
+++ /dev/null
@@ -1,91 +0,0 @@
-package com.alibaba.jstorm.yarn.thrift;
-
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.net.InetAddress;
-import com.google.common.annotations.VisibleForTesting;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.Principal;
-import javax.security.auth.Subject;
-
-/**
- * Request context: includes info about
- * (1) the remote address,
- * (2) the remote subject and primary principal, and
- * (3) the request ID
- */
-public class ReqContext {
- private static final AtomicInteger uniqueId = new AtomicInteger(0);
- private Subject _subject;
- private InetAddress _remoteAddr;
- private Integer _reqID;
- private Map _storm_conf;
-
- /**
- * Get a request context associated with current thread
- * @return
- */
- public static ReqContext context() {
- return ctxt.get();
- }
-
- //each thread will have its own request context
- private static final ThreadLocal < ReqContext > ctxt =
- new ThreadLocal < ReqContext > () {
- @Override
- protected ReqContext initialValue() {
- return new ReqContext(AccessController.getContext());
- }
- };
-
- //private constructor
- @VisibleForTesting
- ReqContext(AccessControlContext acl_ctxt) {
- _subject = Subject.getSubject(acl_ctxt);
- _reqID = uniqueId.incrementAndGet();
- }
-
- /**
- * client address
- */
- public void setRemoteAddress(InetAddress addr) {
- _remoteAddr = addr;
- }
-
- public InetAddress remoteAddress() {
- return _remoteAddr;
- }
-
- /**
- * Set remote subject explicitly
- */
- public void setSubject(Subject subject) {
- _subject = subject;
- }
-
- /**
- * Retrieve client subject associated with this request context
- */
- public Subject subject() {
- return _subject;
- }
-
- /**
- * The primary principal associated with the current subject
- */
- public Principal principal() {
- if (_subject == null) return null;
- Set<Principal> princs = _subject.getPrincipals();
- if (princs.size()==0) return null;
- return (Principal) (princs.toArray()[0]);
- }
-
- /**
- * request ID of this request
- */
- public Integer requestID() {
- return _reqID;
- }
-}
\ No newline at end of file
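
ReqContext is a per-thread holder that a transport plugin fills in before dispatching to the service handler; a self-contained sketch of writing and reading it back:

    import java.net.InetAddress;
    import com.alibaba.jstorm.yarn.thrift.ReqContext;

    public class ReqContextSketch {
        public static void main(String[] args) throws Exception {
            ReqContext ctx = ReqContext.context();            // context bound to the current thread
            ctx.setRemoteAddress(InetAddress.getLocalHost());
            System.out.println(ctx.remoteAddress());
            System.out.println(ctx.requestID());              // id assigned when the context was created
            System.out.println(ctx.principal());              // null until a Subject with principals is set
        }
    }
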
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/SimpleTransportPlugin.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/SimpleTransportPlugin.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/SimpleTransportPlugin.java
deleted file mode 100644
index 411a5ff..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/SimpleTransportPlugin.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package com.alibaba.jstorm.yarn.thrift;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.Socket;
-import java.net.UnknownHostException;
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-import org.apache.thrift7.TException;
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.protocol.TProtocol;
-import org.apache.thrift7.server.THsHaServer;
-import org.apache.thrift7.server.TServer;
-import org.apache.thrift7.transport.TFramedTransport;
-import org.apache.thrift7.transport.TMemoryInputTransport;
-import org.apache.thrift7.transport.TNonblockingServerSocket;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Simple transport for Thrift plugin.
- *
- * This plugin is designed to be backward compatible with existing Storm code.
- */
-public class SimpleTransportPlugin implements ITransportPlugin {
- protected Configuration login_conf;
- private static final Logger LOG = LoggerFactory.getLogger(SimpleTransportPlugin.class);
-
- /**
- * Invoked once immediately after construction
- * @param conf Storm configuration
- * @param login_conf login configuration
- */
- public void prepare(Map storm_conf, Configuration login_conf) {
- this.login_conf = login_conf;
- }
-
- /**
- * We let Thrift apply its default transport factory
- */
- public TServer getServer(int port, TProcessor processor) throws IOException, TTransportException {
- TNonblockingServerSocket serverTransport = new TNonblockingServerSocket(port);
- THsHaServer.Args server_args = new THsHaServer.Args(serverTransport).
- processor(new SimpleWrapProcessor(processor)).
- workerThreads(64).
- protocolFactory(new TBinaryProtocol.Factory());
-
- //construct THsHaServer
- return new THsHaServer(server_args);
- }
-
- /**
- * Connect to the specified server via framed transport
- * @param transport The underlying Thrift transport.
- */
- public TTransport connect(TTransport transport, String serverHost) throws TTransportException {
- //create a framed transport
- TTransport conn = new TFramedTransport(transport);
-
- //connect
- conn.open();
- LOG.debug("Simple client transport has been established");
-
- return conn;
- }
-
- /**
- * Processor that populates simple transport info into ReqContext and then invokes the service handler
- */
- private class SimpleWrapProcessor implements TProcessor {
- final TProcessor wrapped;
-
- SimpleWrapProcessor(TProcessor wrapped) {
- this.wrapped = wrapped;
- }
-
- public boolean process(final TProtocol inProt, final TProtocol outProt) throws TException {
- //populating request context
- ReqContext req_context = ReqContext.context();
-
- TTransport trans = inProt.getTransport();
- if (trans instanceof TMemoryInputTransport) {
- try {
- req_context.setRemoteAddress(InetAddress.getLocalHost());
- } catch (UnknownHostException e) {
- throw new RuntimeException(e);
- }
- } else if (trans instanceof TSocket) {
- TSocket tsocket = (TSocket)trans;
- //remote address
- Socket socket = tsocket.getSocket();
- req_context.setRemoteAddress(socket.getInetAddress());
- }
-
- //anonymous user
- req_context.setSubject(null);
-
- //invoke service handler
- return wrapped.process(inProt, outProt);
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftClient.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftClient.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftClient.java
deleted file mode 100644
index 940b16d..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftClient.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package com.alibaba.jstorm.yarn.thrift;
-
-import java.io.IOException;
-import java.util.Map;
-import javax.security.auth.login.Configuration;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.protocol.TProtocol;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import backtype.storm.utils.Utils;
-
-public class ThriftClient {
- private static final Logger LOG = LoggerFactory.getLogger(ThriftClient.class);
- private TTransport _transport;
- protected TProtocol _protocol;
-
- public ThriftClient(Map storm_conf, String host, int port) throws TTransportException {
- this(storm_conf, host, port, null);
- }
-
- public ThriftClient(Map storm_conf, String host, int port, Integer timeout) throws TTransportException {
- try {
- //locate login configuration
- Configuration login_conf = AuthUtils.GetConfiguration(storm_conf);
-
- //construct a transport plugin
- ITransportPlugin transportPlugin = AuthUtils.GetTransportPlugin(storm_conf, login_conf);
-
- //create a socket with server
- if(host==null) {
- throw new IllegalArgumentException("host is not set");
- }
- if(port<=0) {
- throw new IllegalArgumentException("invalid port: "+port);
- }
- TSocket socket = new TSocket(host, port);
- if(timeout!=null) {
- socket.setTimeout(timeout);
- }
- final TTransport underlyingTransport = socket;
-
- //establish client-server transport via plugin
- _transport = transportPlugin.connect(underlyingTransport, host);
- } catch (IOException ex) {
- throw new RuntimeException(ex);
- }
- _protocol = null;
- if (_transport != null)
- _protocol = new TBinaryProtocol(_transport);
- }
-
- public TTransport transport() {
- return _transport;
- }
-
- public void close() {
- _transport.close();
- }
-}
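
A sketch of standing up the ThriftClient above. The host name is a placeholder, and storm.thrift.transport must name a plugin such as the SimpleTransportPlugin shown earlier:

    import java.util.HashMap;
    import java.util.Map;
    import backtype.storm.utils.Utils;
    import com.alibaba.jstorm.yarn.thrift.ThriftClient;

    public class ThriftClientSketch {
        public static void main(String[] args) throws Exception {
            Map conf = new HashMap(Utils.readStormConfig());
            conf.put("storm.thrift.transport", "com.alibaba.jstorm.yarn.thrift.SimpleTransportPlugin");
            // "thrift-host" is a placeholder for whatever host runs the matching ThriftServer.
            ThriftClient client = new ThriftClient(conf, "thrift-host", 6627, 30 * 1000);
            System.out.println(client.transport().isOpen());
            client.close();
        }
    }
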
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftServer.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftServer.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftServer.java
deleted file mode 100644
index 1ab4311..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/thrift/ThriftServer.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package com.alibaba.jstorm.yarn.thrift;
-
-
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.server.TServer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ThriftServer {
- private static final Logger LOG = LoggerFactory
- .getLogger(ThriftServer.class);
- private Map _storm_conf; // storm configuration
- protected TProcessor _processor = null;
- private int _port = 0;
- private TServer _server = null;
- private Configuration _login_conf;
-
- public ThriftServer(Map storm_conf, TProcessor processor, int port) {
- try {
- _storm_conf = storm_conf;
- _processor = processor;
- _port = port;
-
- // retrieve authentication configuration
- _login_conf = AuthUtils.GetConfiguration(_storm_conf);
- } catch (Exception x) {
- LOG.error(x.getMessage(), x);
- }
- }
-
- public void stop() {
- if (_server != null)
- _server.stop();
- }
-
- /**
- * Is ThriftServer listening to requests?
- *
- * @return true if the server is serving requests, false otherwise
- */
- public boolean isServing() {
- if (_server == null)
- return false;
- return _server.isServing();
- }
-
- public void serve() {
- try {
- // locate our thrift transport plugin
- ITransportPlugin transportPlugin = AuthUtils.GetTransportPlugin(
- _storm_conf, _login_conf);
-
- // server
- _server = transportPlugin.getServer(_port, _processor);
-
- // start accepting requests
- _server.serve();
- } catch (Exception ex) {
- LOG.error("ThriftServer is being stopped due to: " + ex, ex);
- if (_server != null)
- _server.stop();
- Runtime.getRuntime().halt(1); // shutdown server process since we
- // could not handle Thrift requests
- // any more
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/resources/logback.xml
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/resources/logback.xml b/jstorm-on-yarn/src/main/resources/logback.xml
deleted file mode 100644
index ac732b9..0000000
--- a/jstorm-on-yarn/src/main/resources/logback.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-
-<configuration scan="true" scanPeriod="60 seconds">
- <appender name="A1" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>${logfile.name}</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>${logfile.name}.%i</fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
-
- <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>100MB</maxFileSize>
- </triggeringPolicy>
-
- <encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n</pattern>
- </encoder>
- </appender>
-
- <appender name="ACCESS" class="ch.qos.logback.core.rolling.RollingFileAppender">
- <file>access.log</file>
- <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
- <fileNamePattern>access.log.%i</fileNamePattern>
- <minIndex>1</minIndex>
- <maxIndex>9</maxIndex>
- </rollingPolicy>
-
- <triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
- <maxFileSize>100MB</maxFileSize>
- </triggeringPolicy>
-
- <encoder>
- <pattern>%d{yyyy-MM-dd HH:mm:ss} %c{1} [%p] %m%n</pattern>
- </encoder>
- </appender>
-
- <root level="INFO">
- <appender-ref ref="A1"/>
- </root>
-
- <logger name="backtype.storm.security.auth.authorizer" additivity="false">
- <level value="INFO" />
- <appender-ref ref="ACCESS" />
- </logger>
-</configuration>
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/resources/master_defaults.yaml
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/resources/master_defaults.yaml b/jstorm-on-yarn/src/main/resources/master_defaults.yaml
deleted file mode 100644
index 314ba23..0000000
--- a/jstorm-on-yarn/src/main/resources/master_defaults.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright (c) 2013 Yahoo! Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License. See accompanying LICENSE file.
-#
-master.host: "localhost"
-master.thrift.port: 9000
-master.initial-num-supervisors: 1
-master.container.priority: 0
-master.container.size-mb: 5120
-master.heartbeat.interval.millis: 1000
-master.timeout.secs: 1000
-yarn.report.wait.millis: 10000
-nimbusui.startup.ms: 10000
-
-ui.port: 7070
-
-storm.messaging.transport: "backtype.storm.messaging.netty.Context"
-storm.messaging.netty.buffer_size: 1048576
-storm.messaging.netty.max_retries: 100
-storm.messaging.netty.min_wait_ms: 1000
-storm.messaging.netty.max_wait_ms: 5000
-
-# Configuration parameter that allows the launching machine to specify the JAVA_HOME
-# used when the application is executed on the YARN cluster.
-#
-# storm.yarn.java_home: "/vol/anarchy/boneill/jdk1.7.0_40"
-
-# Configuration parameter that allows the launching machine to specify the yarn classpath
-# used when the application is executed on the YARN cluster. To find this value, run
-# "yarn classpath" on the target machines.
-#
-# storm.yarn.yarn_classpath: "/home/boneill/hadoop/etc/hadoop:/home/boneill/hadoop/etc/hadoop:/home/boneill/hadoop/etc/hadoop:/home/boneill/hadoop/share/hadoop/common/lib/*:/home/boneill/hadoop/share/hadoop/common/*:/home/boneill/hadoop/share/hadoop/hdfs:/home/boneill/hadoop/share/hadoop/hdfs/lib/*:/home/boneill/hadoop/share/hadoop/hdfs/*:/home/boneill/hadoop/share/hadoop/yarn/lib/*:/home/boneill/hadoop/share/hadoop/yarn/*:/home/boneill/hadoop/share/hadoop/mapreduce/lib/*:/home/boneill/hadoop/share/hadoop/mapreduce/*:/Users/bone/tools/hadoop/contrib/capacity-scheduler/*.jar:/home/boneill/hadoop/share/hadoop/yarn/*:/home/boneill/hadoop/share/hadoop/yarn/lib/*"
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/test/java/com/taobao/jstorm/yarn/AppTest.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/test/java/com/taobao/jstorm/yarn/AppTest.java b/jstorm-on-yarn/src/test/java/com/taobao/jstorm/yarn/AppTest.java
deleted file mode 100644
index ec21b4b..0000000
--- a/jstorm-on-yarn/src/test/java/com/taobao/jstorm/yarn/AppTest.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package com.taobao.jstorm.yarn;
-
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
-
-/**
- * Unit test for simple App.
- */
-public class AppTest
- extends TestCase
-{
- /**
- * Create the test case
- *
- * @param testName name of the test case
- */
- public AppTest( String testName )
- {
- super( testName );
- }
-
- /**
- * @return the suite of tests being tested
- */
- public static Test suite()
- {
- return new TestSuite( AppTest.class );
- }
-
- /**
- * Rigorous Test :-)
- */
- public void testApp()
- {
- assertTrue( true );
- }
-}
[18/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/MapReducerAggStateUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/MapReducerAggStateUpdater.java b/jstorm-client/src/main/java/storm/trident/state/map/MapReducerAggStateUpdater.java
deleted file mode 100644
index f7c227b..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/MapReducerAggStateUpdater.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package storm.trident.state.map;
-
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.state.ReducerValueUpdater;
-import storm.trident.state.StateUpdater;
-import storm.trident.state.ValueUpdater;
-import storm.trident.tuple.ComboList;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-public class MapReducerAggStateUpdater implements StateUpdater<MapState> {
- ReducerAggregator _agg;
- Fields _groupFields;
- Fields _inputFields;
- ProjectionFactory _groupFactory;
- ProjectionFactory _inputFactory;
- ComboList.Factory _factory;
-
-
- public MapReducerAggStateUpdater(ReducerAggregator agg, Fields groupFields, Fields inputFields) {
- _agg = agg;
- _groupFields = groupFields;
- _inputFields = inputFields;
- _factory = new ComboList.Factory(groupFields.size(), 1);
- }
-
-
- @Override
- public void updateState(MapState map, List<TridentTuple> tuples, TridentCollector collector) {
- Map<List<Object>, List<TridentTuple>> grouped = new HashMap();
-
- //List<List<Object>> groups = new ArrayList<List<Object>>(tuples.size());
- //List<Object> values = new ArrayList<Object>(tuples.size());
- for(TridentTuple t: tuples) {
- List<Object> group = _groupFactory.create(t);
- List<TridentTuple> groupTuples = grouped.get(group);
- if(groupTuples==null) {
- groupTuples = new ArrayList();
- grouped.put(group, groupTuples);
- }
- groupTuples.add(_inputFactory.create(t));
- }
- List<List<Object>> uniqueGroups = new ArrayList(grouped.keySet());
- List<ValueUpdater> updaters = new ArrayList(uniqueGroups.size());
- for(List<Object> group: uniqueGroups) {
- updaters.add(new ReducerValueUpdater(_agg, grouped.get(group)));
- }
- List<Object> results = map.multiUpdate(uniqueGroups, updaters);
-
- for(int i=0; i<uniqueGroups.size(); i++) {
- List<Object> group = uniqueGroups.get(i);
- Object result = results.get(i);
- collector.emit(_factory.create(new List[] {group, new Values(result) }));
- }
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- _groupFactory = context.makeProjectionFactory(_groupFields);
- _inputFactory = context.makeProjectionFactory(_inputFields);
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/MapState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/MapState.java b/jstorm-client/src/main/java/storm/trident/state/map/MapState.java
deleted file mode 100644
index 78901d9..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/MapState.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.List;
-import storm.trident.state.ValueUpdater;
-
-public interface MapState<T> extends ReadOnlyMapState<T> {
- List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters);
- void multiPut(List<List<Object>> keys, List<T> vals);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/MicroBatchIBackingMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/MicroBatchIBackingMap.java b/jstorm-client/src/main/java/storm/trident/state/map/MicroBatchIBackingMap.java
deleted file mode 100644
index 2f356b1..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/MicroBatchIBackingMap.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package storm.trident.state.map;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.LinkedList;
-import java.util.List;
-
-public class MicroBatchIBackingMap<T> implements IBackingMap<T> {
- IBackingMap<T> _delegate;
- Options _options;
-
-
- public static class Options implements Serializable {
- public int maxMultiGetBatchSize = 0; // 0 means delegate batch size = trident batch size.
- public int maxMultiPutBatchSize = 0;
- }
-
- public MicroBatchIBackingMap(final Options options, final IBackingMap<T> delegate) {
- _options = options;
- _delegate = delegate;
- assert options.maxMultiPutBatchSize >= 0;
- assert options.maxMultiGetBatchSize >= 0;
- }
-
- @Override
- public void multiPut(final List<List<Object>> keys, final List<T> values) {
- int thisBatchSize;
- if(_options.maxMultiPutBatchSize == 0) { thisBatchSize = keys.size(); }
- else { thisBatchSize = _options.maxMultiPutBatchSize; }
-
- LinkedList<List<Object>> keysTodo = new LinkedList<List<Object>>(keys);
- LinkedList<T> valuesTodo = new LinkedList<T>(values);
-
- while(!keysTodo.isEmpty()) {
- List<List<Object>> keysBatch = new ArrayList<List<Object>>(thisBatchSize);
- List<T> valuesBatch = new ArrayList<T>(thisBatchSize);
- for(int i=0; i<thisBatchSize && !keysTodo.isEmpty(); i++) {
- keysBatch.add(keysTodo.removeFirst());
- valuesBatch.add(valuesTodo.removeFirst());
- }
-
- _delegate.multiPut(keysBatch, valuesBatch);
- }
- }
-
- @Override
- public List<T> multiGet(final List<List<Object>> keys) {
- int thisBatchSize;
- if(_options.maxMultiGetBatchSize == 0) { thisBatchSize = keys.size(); }
- else { thisBatchSize = _options.maxMultiGetBatchSize; }
-
- LinkedList<List<Object>> keysTodo = new LinkedList<List<Object>>(keys);
-
- List<T> ret = new ArrayList<T>(keys.size());
-
- while(!keysTodo.isEmpty()) {
- List<List<Object>> keysBatch = new ArrayList<List<Object>>(thisBatchSize);
- for(int i=0; i<thisBatchSize && !keysTodo.isEmpty(); i++) {
- keysBatch.add(keysTodo.removeFirst());
- }
-
- List<T> retSubset = _delegate.multiGet(keysBatch);
- ret.addAll(retSubset);
- }
-
- return ret;
- }
-}
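The Options above cap how many keys the delegate sees per multiPut/multiGet call, with 0 meaning the Trident batch size is passed through unchanged. A short wiring sketch, assuming the MemoryBackingMap that appears later in this message as the backing store:

    MicroBatchIBackingMap.Options opts = new MicroBatchIBackingMap.Options();
    opts.maxMultiPutBatchSize = 100;   // delegate never sees more than 100 keys per multiPut
    opts.maxMultiGetBatchSize = 100;   // same cap for multiGet
    IBackingMap<Object> wrapped = new MicroBatchIBackingMap<Object>(opts, new MemoryBackingMap());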
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/NonTransactionalMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/NonTransactionalMap.java b/jstorm-client/src/main/java/storm/trident/state/map/NonTransactionalMap.java
deleted file mode 100644
index 3a140b5..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/NonTransactionalMap.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package storm.trident.state.map;
-
-import storm.trident.state.ValueUpdater;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class NonTransactionalMap<T> implements MapState<T> {
- public static <T> MapState<T> build(IBackingMap<T> backing) {
- return new NonTransactionalMap<T>(backing);
- }
-
- IBackingMap<T> _backing;
-
- protected NonTransactionalMap(IBackingMap<T> backing) {
- _backing = backing;
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- return _backing.multiGet(keys);
- }
-
- @Override
- public List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters) {
- List<T> curr = _backing.multiGet(keys);
- List<T> ret = new ArrayList<T>(curr.size());
- for(int i=0; i<curr.size(); i++) {
- T currVal = curr.get(i);
- ValueUpdater<T> updater = updaters.get(i);
- ret.add(updater.update(currVal));
- }
- _backing.multiPut(keys, ret);
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- _backing.multiPut(keys, vals);
- }
-
- @Override
- public void beginCommit(Long txid) {
- }
-
- @Override
- public void commit(Long txid) {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/OpaqueMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/OpaqueMap.java b/jstorm-client/src/main/java/storm/trident/state/map/OpaqueMap.java
deleted file mode 100644
index f646d66..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/OpaqueMap.java
+++ /dev/null
@@ -1,107 +0,0 @@
-package storm.trident.state.map;
-
-import storm.trident.state.OpaqueValue;
-import storm.trident.state.ValueUpdater;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class OpaqueMap<T> implements MapState<T> {
- public static <T> MapState<T> build(IBackingMap<OpaqueValue> backing) {
- return new OpaqueMap<T>(backing);
- }
-
- CachedBatchReadsMap<OpaqueValue> _backing;
- Long _currTx;
-
- protected OpaqueMap(IBackingMap<OpaqueValue> backing) {
- _backing = new CachedBatchReadsMap(backing);
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- List<CachedBatchReadsMap.RetVal<OpaqueValue>> curr = _backing.multiGet(keys);
- List<T> ret = new ArrayList<T>(curr.size());
- for(CachedBatchReadsMap.RetVal<OpaqueValue> retval: curr) {
- OpaqueValue val = retval.val;
- if(val!=null) {
- if(retval.cached) {
- ret.add((T) val.getCurr());
- } else {
- ret.add((T) val.get(_currTx));
- }
- } else {
- ret.add(null);
- }
- }
- return ret;
- }
-
- @Override
- public List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters) {
- List<CachedBatchReadsMap.RetVal<OpaqueValue>> curr = _backing.multiGet(keys);
- List<OpaqueValue> newVals = new ArrayList<OpaqueValue>(curr.size());
- List<T> ret = new ArrayList<T>();
- for(int i=0; i<curr.size(); i++) {
- CachedBatchReadsMap.RetVal<OpaqueValue> retval = curr.get(i);
- OpaqueValue<T> val = retval.val;
- ValueUpdater<T> updater = updaters.get(i);
- T prev;
- if(val==null) {
- prev = null;
- } else {
- if(retval.cached) {
- prev = val.getCurr();
- } else {
- prev = val.get(_currTx);
- }
- }
- T newVal = updater.update(prev);
- ret.add(newVal);
- OpaqueValue<T> newOpaqueVal;
- if(val==null) {
- newOpaqueVal = new OpaqueValue<T>(_currTx, newVal);
- } else {
- newOpaqueVal = val.update(_currTx, newVal);
- }
- newVals.add(newOpaqueVal);
- }
- _backing.multiPut(keys, newVals);
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- List<ValueUpdater> updaters = new ArrayList<ValueUpdater>(vals.size());
- for(T val: vals) {
- updaters.add(new ReplaceUpdater<T>(val));
- }
- multiUpdate(keys, updaters);
- }
-
- @Override
- public void beginCommit(Long txid) {
- _currTx = txid;
- _backing.reset();
- }
-
- @Override
- public void commit(Long txid) {
- _currTx = null;
- _backing.reset();
- }
-
- static class ReplaceUpdater<T> implements ValueUpdater<T> {
- T _t;
-
- public ReplaceUpdater(T t) {
- _t = t;
- }
-
- @Override
- public T update(Object stored) {
- return _t;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/ReadOnlyMapState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/ReadOnlyMapState.java b/jstorm-client/src/main/java/storm/trident/state/map/ReadOnlyMapState.java
deleted file mode 100644
index 5a519c4..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/ReadOnlyMapState.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.List;
-import storm.trident.state.State;
-
-public interface ReadOnlyMapState<T> extends State {
- // certain states might only accept one-tuple keys - those should just throw an error
- List<T> multiGet(List<List<Object>> keys);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/RemovableMapState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/RemovableMapState.java b/jstorm-client/src/main/java/storm/trident/state/map/RemovableMapState.java
deleted file mode 100644
index cf34f05..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/RemovableMapState.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.List;
-import storm.trident.state.State;
-
-public interface RemovableMapState<T> extends State {
- void multiRemove(List<List<Object>> keys);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/SnapshottableMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/SnapshottableMap.java b/jstorm-client/src/main/java/storm/trident/state/map/SnapshottableMap.java
deleted file mode 100644
index f42a5c9..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/SnapshottableMap.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.Arrays;
-import java.util.List;
-import storm.trident.state.ValueUpdater;
-import storm.trident.state.snapshot.Snapshottable;
-
-
-public class SnapshottableMap<T> implements MapState<T>, Snapshottable<T> {
- MapState<T> _delegate;
- List<List<Object>> _keys;
-
- public SnapshottableMap(MapState<T> delegate, List<Object> snapshotKey) {
- _delegate = delegate;
- _keys = Arrays.asList(snapshotKey);
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- return _delegate.multiGet(keys);
- }
-
- @Override
- public List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters) {
- return _delegate.multiUpdate(keys, updaters);
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- _delegate.multiPut(keys, vals);
- }
-
- @Override
- public void beginCommit(Long txid) {
- _delegate.beginCommit(txid);
- }
-
- @Override
- public void commit(Long txid) {
- _delegate.commit(txid);
- }
-
- @Override
- public T get() {
- return multiGet(_keys).get(0);
- }
-
- @Override
- public T update(ValueUpdater updater) {
- List<ValueUpdater> updaters = Arrays.asList(updater);
- return multiUpdate(_keys, updaters).get(0);
- }
-
- @Override
- public void set(T o) {
- multiPut(_keys, Arrays.asList(o));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/TransactionalMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/TransactionalMap.java b/jstorm-client/src/main/java/storm/trident/state/map/TransactionalMap.java
deleted file mode 100644
index 1f44910..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/TransactionalMap.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package storm.trident.state.map;
-
-import storm.trident.state.TransactionalValue;
-import storm.trident.state.ValueUpdater;
-
-import java.util.ArrayList;
-import java.util.List;
-
-
-public class TransactionalMap<T> implements MapState<T> {
- public static <T> MapState<T> build(IBackingMap<TransactionalValue> backing) {
- return new TransactionalMap<T>(backing);
- }
-
- CachedBatchReadsMap<TransactionalValue> _backing;
- Long _currTx;
-
- protected TransactionalMap(IBackingMap<TransactionalValue> backing) {
- _backing = new CachedBatchReadsMap(backing);
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- List<CachedBatchReadsMap.RetVal<TransactionalValue>> vals = _backing.multiGet(keys);
- List<T> ret = new ArrayList<T>(vals.size());
- for(CachedBatchReadsMap.RetVal<TransactionalValue> retval: vals) {
- TransactionalValue v = retval.val;
- if(v!=null) {
- ret.add((T) v.getVal());
- } else {
- ret.add(null);
- }
- }
- return ret;
- }
-
- @Override
- public List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters) {
- List<CachedBatchReadsMap.RetVal<TransactionalValue>> curr = _backing.multiGet(keys);
- List<TransactionalValue> newVals = new ArrayList<TransactionalValue>(curr.size());
- List<List<Object>> newKeys = new ArrayList();
- List<T> ret = new ArrayList<T>();
- for(int i=0; i<curr.size(); i++) {
- CachedBatchReadsMap.RetVal<TransactionalValue> retval = curr.get(i);
- TransactionalValue<T> val = retval.val;
- ValueUpdater<T> updater = updaters.get(i);
- TransactionalValue<T> newVal;
- boolean changed = false;
- if(val==null) {
- newVal = new TransactionalValue<T>(_currTx, updater.update(null));
- changed = true;
- } else {
- if(_currTx!=null && _currTx.equals(val.getTxid()) && !retval.cached) {
- newVal = val;
- } else {
- newVal = new TransactionalValue<T>(_currTx, updater.update(val.getVal()));
- changed = true;
- }
- }
- ret.add(newVal.getVal());
- if(changed) {
- newVals.add(newVal);
- newKeys.add(keys.get(i));
- }
- }
- if(!newKeys.isEmpty()) {
- _backing.multiPut(newKeys, newVals);
- }
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- List<TransactionalValue> newVals = new ArrayList<TransactionalValue>(vals.size());
- for(T val: vals) {
- newVals.add(new TransactionalValue<T>(_currTx, val));
- }
- _backing.multiPut(keys, newVals);
- }
-
- @Override
- public void beginCommit(Long txid) {
- _currTx = txid;
- _backing.reset();
- }
-
- @Override
- public void commit(Long txid) {
- _currTx = null;
- _backing.reset();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/snapshot/ReadOnlySnapshottable.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/snapshot/ReadOnlySnapshottable.java b/jstorm-client/src/main/java/storm/trident/state/snapshot/ReadOnlySnapshottable.java
deleted file mode 100644
index 2064a98..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/snapshot/ReadOnlySnapshottable.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package storm.trident.state.snapshot;
-
-import storm.trident.state.State;
-
-public interface ReadOnlySnapshottable<T> extends State {
- T get();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/snapshot/Snapshottable.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/snapshot/Snapshottable.java b/jstorm-client/src/main/java/storm/trident/state/snapshot/Snapshottable.java
deleted file mode 100644
index f216485..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/snapshot/Snapshottable.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package storm.trident.state.snapshot;
-
-import storm.trident.state.ValueUpdater;
-
-
-// used by Stream#persistentAggregate
-public interface Snapshottable<T> extends ReadOnlySnapshottable<T> {
- T update(ValueUpdater updater);
- void set(T o);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/CountAsAggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/CountAsAggregator.java b/jstorm-client/src/main/java/storm/trident/testing/CountAsAggregator.java
deleted file mode 100644
index 52f482f..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/CountAsAggregator.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.tuple.Values;
-import storm.trident.operation.BaseAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-
-
-public class CountAsAggregator extends BaseAggregator<CountAsAggregator.State> {
-
- static class State {
- long count = 0;
- }
-
- @Override
- public State init(Object batchId, TridentCollector collector) {
- return new State();
- }
-
- @Override
- public void aggregate(State state, TridentTuple tuple, TridentCollector collector) {
- state.count++;
- }
-
- @Override
- public void complete(State state, TridentCollector collector) {
- collector.emit(new Values(state.count));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/FeederBatchSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/FeederBatchSpout.java b/jstorm-client/src/main/java/storm/trident/testing/FeederBatchSpout.java
deleted file mode 100644
index 5571153..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/FeederBatchSpout.java
+++ /dev/null
@@ -1,168 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.RegisteredGlobalState;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.Semaphore;
-import storm.trident.operation.TridentCollector;
-import storm.trident.spout.ITridentSpout;
-import storm.trident.topology.TransactionAttempt;
-import storm.trident.topology.TridentTopologyBuilder;
-
-public class FeederBatchSpout implements ITridentSpout, IFeeder {
-
- String _id;
- String _semaphoreId;
- Fields _outFields;
- boolean _waitToEmit = true;
-
-
- public FeederBatchSpout(List<String> fields) {
- _outFields = new Fields(fields);
- _id = RegisteredGlobalState.registerState(new CopyOnWriteArrayList());
- _semaphoreId = RegisteredGlobalState.registerState(new CopyOnWriteArrayList());
- }
-
- public void setWaitToEmit(boolean trueIfWait) {
- _waitToEmit = trueIfWait;
- }
-
- public void feed(Object tuples) {
- Semaphore sem = new Semaphore(0);
- ((List)RegisteredGlobalState.getState(_semaphoreId)).add(sem);
- ((List)RegisteredGlobalState.getState(_id)).add(tuples);
- try {
- sem.acquire();
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
- }
-
-
- public class FeederCoordinator implements ITridentSpout.BatchCoordinator<Map<Integer, List<List<Object>>>> {
-
- int _numPartitions;
- int _emittedIndex = 0;
- Map<Long, Integer> txIndices = new HashMap();
-
- public FeederCoordinator(int numPartitions) {
- _numPartitions = numPartitions;
- }
-
- @Override
- public Map<Integer, List<List<Object>>> initializeTransaction(long txid, Map<Integer, List<List<Object>>> prevMetadata, Map<Integer, List<List<Object>>> currMetadata) {
- if(currMetadata!=null) return currMetadata;
- List allBatches = (List) RegisteredGlobalState.getState(_id);
- if(allBatches.size()>_emittedIndex) {
- Object batchInfo = allBatches.get(_emittedIndex);
- txIndices.put(txid, _emittedIndex);
- _emittedIndex += 1;
- if(batchInfo instanceof Map) {
- return (Map) batchInfo;
- } else {
- List batchList = (List) batchInfo;
- Map<Integer, List<List<Object>>> partitions = new HashMap();
- for(int i=0; i<_numPartitions; i++) {
- partitions.put(i, new ArrayList());
- }
- for(int i=0; i<batchList.size(); i++) {
- int partition = i % _numPartitions;
- partitions.get(partition).add((List)batchList.get(i));
- }
- return partitions;
- }
- } else {
- return new HashMap();
- }
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void success(long txid) {
- Integer index = txIndices.get(txid);
- if(index != null) {
- Semaphore sem = (Semaphore) ((List)RegisteredGlobalState.getState(_semaphoreId)).get(index);
- sem.release();
- }
- }
-
- int _masterEmitted = 0;
-
- @Override
- public boolean isReady(long txid) {
- if(!_waitToEmit) return true;
- List allBatches = (List) RegisteredGlobalState.getState(_id);
- if(allBatches.size() > _masterEmitted) {
- _masterEmitted++;
- return true;
- } else {
- Utils.sleep(2);
- return false;
- }
- }
- }
-
- public class FeederEmitter implements ITridentSpout.Emitter<Map<Integer, List<List<Object>>>> {
-
- int _index;
-
- public FeederEmitter(int index) {
- _index = index;
- }
-
- @Override
- public void emitBatch(TransactionAttempt tx, Map<Integer, List<List<Object>>> coordinatorMeta, TridentCollector collector) {
- List<List<Object>> tuples = coordinatorMeta.get(_index);
- if(tuples!=null) {
- for(List<Object> t: tuples) {
- collector.emit(t);
- }
- }
- }
-
- @Override
- public void success(TransactionAttempt tx) {
- }
-
- @Override
- public void close() {
- }
- }
-
-
- @Override
- public Map getComponentConfiguration() {
- return null;
- }
-
- @Override
- public Fields getOutputFields() {
- return _outFields;
- }
-
- @Override
- public BatchCoordinator getCoordinator(String txStateId, Map conf, TopologyContext context) {
- int numTasks = context.getComponentTasks(
- TridentTopologyBuilder.spoutIdFromCoordinatorId(
- context.getThisComponentId()))
- .size();
- return new FeederCoordinator(numTasks);
- }
-
- @Override
- public Emitter getEmitter(String txStateId, Map conf, TopologyContext context) {
- return new FeederEmitter(context.getThisTaskIndex());
- }
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/FeederCommitterBatchSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/FeederCommitterBatchSpout.java b/jstorm-client/src/main/java/storm/trident/testing/FeederCommitterBatchSpout.java
deleted file mode 100644
index d105c0c..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/FeederCommitterBatchSpout.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-import storm.trident.spout.ICommitterTridentSpout;
-import storm.trident.spout.ITridentSpout;
-import storm.trident.topology.TransactionAttempt;
-
-
-public class FeederCommitterBatchSpout implements ICommitterTridentSpout, IFeeder {
-
- FeederBatchSpout _spout;
-
- public FeederCommitterBatchSpout(List<String> fields) {
- _spout = new FeederBatchSpout(fields);
- }
-
- public void setWaitToEmit(boolean trueIfWait) {
- _spout.setWaitToEmit(trueIfWait);
- }
-
- static class CommitterEmitter implements ICommitterTridentSpout.Emitter {
- ITridentSpout.Emitter _emitter;
-
-
- public CommitterEmitter(ITridentSpout.Emitter e) {
- _emitter = e;
- }
-
- @Override
- public void commit(TransactionAttempt attempt) {
- }
-
- @Override
- public void emitBatch(TransactionAttempt tx, Object coordinatorMeta, TridentCollector collector) {
- _emitter.emitBatch(tx, coordinatorMeta, collector);
- }
-
- @Override
- public void success(TransactionAttempt tx) {
- _emitter.success(tx);
- }
-
- @Override
- public void close() {
- _emitter.close();
- }
-
- }
-
- @Override
- public Emitter getEmitter(String txStateId, Map conf, TopologyContext context) {
- return new CommitterEmitter(_spout.getEmitter(txStateId, conf, context));
- }
-
- @Override
- public BatchCoordinator getCoordinator(String txStateId, Map conf, TopologyContext context) {
- return _spout.getCoordinator(txStateId, conf, context);
- }
-
- @Override
- public Fields getOutputFields() {
- return _spout.getOutputFields();
- }
-
- @Override
- public Map getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
- @Override
- public void feed(Object tuples) {
- _spout.feed(tuples);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/FixedBatchSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/FixedBatchSpout.java b/jstorm-client/src/main/java/storm/trident/testing/FixedBatchSpout.java
deleted file mode 100644
index 6e32c1a..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/FixedBatchSpout.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.HashMap;
-
-import storm.trident.operation.TridentCollector;
-import storm.trident.spout.IBatchSpout;
-
-
-public class FixedBatchSpout implements IBatchSpout {
-
- Fields fields;
- List<Object>[] outputs;
- int maxBatchSize;
- HashMap<Long, List<List<Object>>> batches = new HashMap<Long, List<List<Object>>>();
-
- public FixedBatchSpout(Fields fields, int maxBatchSize, List<Object>... outputs) {
- this.fields = fields;
- this.outputs = outputs;
- this.maxBatchSize = maxBatchSize;
- }
-
- int index = 0;
- boolean cycle = false;
-
- public void setCycle(boolean cycle) {
- this.cycle = cycle;
- }
-
- @Override
- public void open(Map conf, TopologyContext context) {
- index = 0;
- }
-
- @Override
- public void emitBatch(long batchId, TridentCollector collector) {
- List<List<Object>> batch = this.batches.get(batchId);
- if(batch == null){
- batch = new ArrayList<List<Object>>();
- if(index>=outputs.length && cycle) {
- index = 0;
- }
- for(int i=0; index < outputs.length && i < maxBatchSize; index++, i++) {
- batch.add(outputs[index]);
- }
- this.batches.put(batchId, batch);
- }
- for(List<Object> list : batch){
- collector.emit(list);
- }
- }
-
- @Override
- public void ack(long batchId) {
- this.batches.remove(batchId);
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public Map getComponentConfiguration() {
- Config conf = new Config();
- conf.setMaxTaskParallelism(1);
- return conf;
- }
-
- @Override
- public Fields getOutputFields() {
- return fields;
- }
-
-}
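A small usage sketch of the FixedBatchSpout removed above; the field name and sentences are illustrative only:

    FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
            new Values("the cow jumped over the moon"),
            new Values("four score and seven years ago"));
    spout.setCycle(true);   // replay the fixed tuples indefinitely; each batch carries at most 3 tuples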
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/IFeeder.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/IFeeder.java b/jstorm-client/src/main/java/storm/trident/testing/IFeeder.java
deleted file mode 100644
index eaf02bb..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/IFeeder.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package storm.trident.testing;
-
-
-public interface IFeeder {
- void feed(Object tuples);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/LRUMemoryMapState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/LRUMemoryMapState.java b/jstorm-client/src/main/java/storm/trident/testing/LRUMemoryMapState.java
deleted file mode 100644
index 51c8ffb..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/LRUMemoryMapState.java
+++ /dev/null
@@ -1,135 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.task.IMetricsContext;
-import storm.trident.state.ITupleCollection;
-import backtype.storm.tuple.Values;
-import java.util.*;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-import storm.trident.state.OpaqueValue;
-import storm.trident.state.State;
-import storm.trident.state.StateFactory;
-import storm.trident.state.ValueUpdater;
-import storm.trident.state.map.*;
-import storm.trident.state.snapshot.Snapshottable;
-import storm.trident.util.LRUMap;
-
-public class LRUMemoryMapState<T> implements Snapshottable<T>, ITupleCollection, MapState<T> {
-
- LRUMemoryMapStateBacking<OpaqueValue> _backing;
- SnapshottableMap<T> _delegate;
-
- public LRUMemoryMapState(int cacheSize, String id) {
- _backing = new LRUMemoryMapStateBacking(cacheSize, id);
- _delegate = new SnapshottableMap(OpaqueMap.build(_backing), new Values("$MEMORY-MAP-STATE-GLOBAL$"));
- }
-
- public T update(ValueUpdater updater) {
- return _delegate.update(updater);
- }
-
- public void set(T o) {
- _delegate.set(o);
- }
-
- public T get() {
- return _delegate.get();
- }
-
- public void beginCommit(Long txid) {
- _delegate.beginCommit(txid);
- }
-
- public void commit(Long txid) {
- _delegate.commit(txid);
- }
-
- public Iterator<List<Object>> getTuples() {
- return _backing.getTuples();
- }
-
- public List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters) {
- return _delegate.multiUpdate(keys, updaters);
- }
-
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- _delegate.multiPut(keys, vals);
- }
-
- public List<T> multiGet(List<List<Object>> keys) {
- return _delegate.multiGet(keys);
- }
-
- public static class Factory implements StateFactory {
-
- String _id;
- int _maxSize;
-
- public Factory(int maxSize) {
- _id = UUID.randomUUID().toString();
- _maxSize = maxSize;
- }
-
- @Override
- public State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
- return new LRUMemoryMapState(_maxSize, _id + partitionIndex);
- }
- }
-
- static ConcurrentHashMap<String, Map<List<Object>, Object>> _dbs = new ConcurrentHashMap<String, Map<List<Object>, Object>>();
- static class LRUMemoryMapStateBacking<T> implements IBackingMap<T>, ITupleCollection {
-
- public static void clearAll() {
- _dbs.clear();
- }
- Map<List<Object>, T> db;
- Long currTx;
-
- public LRUMemoryMapStateBacking(int cacheSize, String id) {
- _dbs.putIfAbsent(id, new LRUMap<List<Object>, Object>(cacheSize));
- this.db = (Map<List<Object>, T>) _dbs.get(id);
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- List<T> ret = new ArrayList();
- for (List<Object> key : keys) {
- ret.add(db.get(key));
- }
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- for (int i = 0; i < keys.size(); i++) {
- List<Object> key = keys.get(i);
- T val = vals.get(i);
- db.put(key, val);
- }
- }
-
- @Override
- public Iterator<List<Object>> getTuples() {
- return new Iterator<List<Object>>() {
-
- private Iterator<Map.Entry<List<Object>, T>> it = db.entrySet().iterator();
-
- public boolean hasNext() {
- return it.hasNext();
- }
-
- public List<Object> next() {
- Map.Entry<List<Object>, T> e = it.next();
- List<Object> ret = new ArrayList<Object>();
- ret.addAll(e.getKey());
- ret.add(((OpaqueValue)e.getValue()).getCurr());
- return ret;
- }
-
- public void remove() {
- throw new UnsupportedOperationException("Not supported yet.");
- }
- };
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/MemoryBackingMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/MemoryBackingMap.java b/jstorm-client/src/main/java/storm/trident/testing/MemoryBackingMap.java
deleted file mode 100644
index e222ba6..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/MemoryBackingMap.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package storm.trident.testing;
-
-import storm.trident.state.map.IBackingMap;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class MemoryBackingMap implements IBackingMap<Object> {
- Map _vals = new HashMap();
-
- @Override
- public List<Object> multiGet(List<List<Object>> keys) {
- List ret = new ArrayList();
- for(List key: keys) {
- ret.add(_vals.get(key));
- }
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<Object> vals) {
- for(int i=0; i<keys.size(); i++) {
- List key = keys.get(i);
- Object val = vals.get(i);
- _vals.put(key, val);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/MemoryMapState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/MemoryMapState.java b/jstorm-client/src/main/java/storm/trident/testing/MemoryMapState.java
deleted file mode 100644
index 9512d63..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/MemoryMapState.java
+++ /dev/null
@@ -1,157 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.task.IMetricsContext;
-import storm.trident.state.ITupleCollection;
-import backtype.storm.tuple.Values;
-import java.util.*;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-import storm.trident.state.OpaqueValue;
-import storm.trident.state.State;
-import storm.trident.state.StateFactory;
-import storm.trident.state.ValueUpdater;
-import storm.trident.state.map.*;
-import storm.trident.state.snapshot.Snapshottable;
-
-public class MemoryMapState<T> implements Snapshottable<T>, ITupleCollection, MapState<T>, RemovableMapState<T> {
-
- MemoryMapStateBacking<OpaqueValue> _backing;
- SnapshottableMap<T> _delegate;
- List<List<Object>> _removed = new ArrayList();
- Long _currTx = null;
-
-
- public MemoryMapState(String id) {
- _backing = new MemoryMapStateBacking(id);
- _delegate = new SnapshottableMap(OpaqueMap.build(_backing), new Values("$MEMORY-MAP-STATE-GLOBAL$"));
- }
-
- public T update(ValueUpdater updater) {
- return _delegate.update(updater);
- }
-
- public void set(T o) {
- _delegate.set(o);
- }
-
- public T get() {
- return _delegate.get();
- }
-
- public void beginCommit(Long txid) {
- _delegate.beginCommit(txid);
- if(txid==null || !txid.equals(_currTx)) {
- _backing.multiRemove(_removed);
- }
- _removed = new ArrayList();
- _currTx = txid;
- }
-
- public void commit(Long txid) {
- _delegate.commit(txid);
- }
-
- public Iterator<List<Object>> getTuples() {
- return _backing.getTuples();
- }
-
- public List<T> multiUpdate(List<List<Object>> keys, List<ValueUpdater> updaters) {
- return _delegate.multiUpdate(keys, updaters);
- }
-
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- _delegate.multiPut(keys, vals);
- }
-
- public List<T> multiGet(List<List<Object>> keys) {
- return _delegate.multiGet(keys);
- }
-
- @Override
- public void multiRemove(List<List<Object>> keys) {
- List nulls = new ArrayList();
- for(int i=0; i<keys.size(); i++) {
- nulls.add(null);
- }
- // first just set the keys to null, then flag to remove them at beginning of next commit when we know the current and last value are both null
- multiPut(keys, nulls);
- _removed.addAll(keys);
- }
-
- public static class Factory implements StateFactory {
-
- String _id;
-
- public Factory() {
- _id = UUID.randomUUID().toString();
- }
-
- @Override
- public State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
- return new MemoryMapState(_id + partitionIndex);
- }
- }
-
- static ConcurrentHashMap<String, Map<List<Object>, Object>> _dbs = new ConcurrentHashMap<String, Map<List<Object>, Object>>();
- static class MemoryMapStateBacking<T> implements IBackingMap<T>, ITupleCollection {
-
- public static void clearAll() {
- _dbs.clear();
- }
- Map<List<Object>, T> db;
- Long currTx;
-
- public MemoryMapStateBacking(String id) {
- _dbs.putIfAbsent(id, new HashMap());
- this.db = (Map<List<Object>, T>) _dbs.get(id);
- }
-
- public void multiRemove(List<List<Object>> keys) {
- for(List<Object> key: keys) {
- db.remove(key);
- }
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- List<T> ret = new ArrayList();
- for (List<Object> key : keys) {
- ret.add(db.get(key));
- }
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- for (int i = 0; i < keys.size(); i++) {
- List<Object> key = keys.get(i);
- T val = vals.get(i);
- db.put(key, val);
- }
- }
-
- @Override
- public Iterator<List<Object>> getTuples() {
- return new Iterator<List<Object>>() {
-
- private Iterator<Map.Entry<List<Object>, T>> it = db.entrySet().iterator();
-
- public boolean hasNext() {
- return it.hasNext();
- }
-
- public List<Object> next() {
- Map.Entry<List<Object>, T> e = it.next();
- List<Object> ret = new ArrayList<Object>();
- ret.addAll(e.getKey());
- ret.add(((OpaqueValue)e.getValue()).getCurr());
- return ret;
- }
-
- public void remove() {
- throw new UnsupportedOperationException("Not supported yet.");
- }
- };
- }
- }
-}
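As the Snapshottable comment earlier in this message notes, these map states are typically consumed through Stream#persistentAggregate. A hedged sketch of the usual wiring; TridentTopology, Count and the field names come from the wider Trident API rather than from the files in this diff, and "spout" could be the FixedBatchSpout sketched above:

    TridentTopology topology = new TridentTopology();
    topology.newStream("spout1", spout)
            .each(new Fields("sentence"), new Split(), new Fields("word"))
            .groupBy(new Fields("word"))
            .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));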
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/Split.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/Split.java b/jstorm-client/src/main/java/storm/trident/testing/Split.java
deleted file mode 100644
index 65cdb8b..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/Split.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.tuple.Values;
-import storm.trident.operation.BaseFunction;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-
-public class Split extends BaseFunction {
-
- @Override
- public void execute(TridentTuple tuple, TridentCollector collector) {
- for(String word: tuple.getString(0).split(" ")) {
- if(word.length() > 0) {
- collector.emit(new Values(word));
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/StringLength.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/StringLength.java b/jstorm-client/src/main/java/storm/trident/testing/StringLength.java
deleted file mode 100644
index f99a5c7..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/StringLength.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package storm.trident.testing;
-
-import backtype.storm.tuple.Values;
-import storm.trident.operation.BaseFunction;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-
-public class StringLength extends BaseFunction {
-
- @Override
- public void execute(TridentTuple tuple, TridentCollector collector) {
- collector.emit(new Values(tuple.getString(0).length()));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/TrueFilter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/TrueFilter.java b/jstorm-client/src/main/java/storm/trident/testing/TrueFilter.java
deleted file mode 100644
index 6912063..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/TrueFilter.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package storm.trident.testing;
-
-import storm.trident.operation.BaseFilter;
-import storm.trident.tuple.TridentTuple;
-
-public class TrueFilter extends BaseFilter {
-
- @Override
- public boolean isKeep(TridentTuple tuple) {
- return true;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/testing/TuplifyArgs.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/testing/TuplifyArgs.java b/jstorm-client/src/main/java/storm/trident/testing/TuplifyArgs.java
deleted file mode 100644
index 764e51e..0000000
--- a/jstorm-client/src/main/java/storm/trident/testing/TuplifyArgs.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package storm.trident.testing;
-
-import java.util.List;
-
-import storm.trident.operation.BaseFunction;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-import backtype.storm.utils.Utils;
-
-public class TuplifyArgs extends BaseFunction {
-
- @Override
- public void execute(TridentTuple input, TridentCollector collector) {
- String args = input.getString(0);
- List<List<Object>> tuples = (List) Utils.from_json(args);
- for(List<Object> tuple: tuples) {
- collector.emit(tuple);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/BatchInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/BatchInfo.java b/jstorm-client/src/main/java/storm/trident/topology/BatchInfo.java
deleted file mode 100644
index a3e3076..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/BatchInfo.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package storm.trident.topology;
-
-import storm.trident.spout.IBatchID;
-
-
-public class BatchInfo {
- public IBatchID batchId;
- public Object state;
- public String batchGroup;
-
- public BatchInfo(String batchGroup, IBatchID batchId, Object state) {
- this.batchGroup = batchGroup;
- this.batchId = batchId;
- this.state = state;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/ITridentBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/ITridentBatchBolt.java b/jstorm-client/src/main/java/storm/trident/topology/ITridentBatchBolt.java
deleted file mode 100644
index b6f60ce..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/ITridentBatchBolt.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package storm.trident.topology;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IComponent;
-import backtype.storm.tuple.Tuple;
-import java.util.Map;
-
-public interface ITridentBatchBolt extends IComponent {
- void prepare(Map conf, TopologyContext context, BatchOutputCollector collector);
- void execute(BatchInfo batchInfo, Tuple tuple);
- void finishBatch(BatchInfo batchInfo);
- Object initBatchState(String batchGroup, Object batchId);
- void cleanup();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/MasterBatchCoordinator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/MasterBatchCoordinator.java b/jstorm-client/src/main/java/storm/trident/topology/MasterBatchCoordinator.java
deleted file mode 100644
index 201696e..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/MasterBatchCoordinator.java
+++ /dev/null
@@ -1,317 +0,0 @@
-package storm.trident.topology;
-
-import backtype.storm.Config;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.WindowedTimeThrottler;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TreeMap;
-import java.util.Random;
-import java.util.concurrent.atomic.AtomicBoolean;
-import org.apache.log4j.Logger;
-import storm.trident.spout.ITridentSpout;
-import storm.trident.spout.ICommitterTridentSpout;
-import storm.trident.topology.state.TransactionalState;
-
-public class MasterBatchCoordinator extends BaseRichSpout {
- public static final Logger LOG = Logger.getLogger(MasterBatchCoordinator.class);
-
- public static final long INIT_TXID = 1L;
-
-
- public static final String BATCH_STREAM_ID = "$batch";
- public static final String COMMIT_STREAM_ID = "$commit";
- public static final String SUCCESS_STREAM_ID = "$success";
-
- private static final String CURRENT_TX = "currtx";
- private static final String CURRENT_ATTEMPTS = "currattempts";
-
- private static enum Operation {
- ACK,
- FAIL,
- NEXTTUPLE
- }
-
- private List<TransactionalState> _states = new ArrayList();
-
- TreeMap<Long, TransactionStatus> _activeTx = new TreeMap<Long, TransactionStatus>();
- TreeMap<Long, Integer> _attemptIds;
-
- private SpoutOutputCollector _collector;
- Long _currTransaction;
- int _maxTransactionActive;
-
- List<ITridentSpout.BatchCoordinator> _coordinators = new ArrayList();
-
-
- List<String> _managedSpoutIds;
- List<ITridentSpout> _spouts;
- WindowedTimeThrottler _throttler;
-
- boolean _active = true;
-
- AtomicBoolean failedOccur = new AtomicBoolean(false);
-
- public MasterBatchCoordinator(List<String> spoutIds, List<ITridentSpout> spouts) {
- if(spoutIds.isEmpty()) {
- throw new IllegalArgumentException("Must manage at least one spout");
- }
- _managedSpoutIds = spoutIds;
- _spouts = spouts;
- }
-
- public List<String> getManagedSpoutIds(){
- return _managedSpoutIds;
- }
-
- @Override
- public void activate() {
- _active = true;
- }
-
- @Override
- public void deactivate() {
- _active = false;
- }
-
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
- _throttler = new WindowedTimeThrottler((Number)conf.get(Config.TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS), 1);
- for(String spoutId: _managedSpoutIds) {
- _states.add(TransactionalState.newCoordinatorState(conf, spoutId));
- }
- _currTransaction = getStoredCurrTransaction();
-
- _collector = collector;
- Number active = (Number) conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING);
- if(active==null) {
- _maxTransactionActive = 1;
- } else {
- _maxTransactionActive = active.intValue();
- }
- _attemptIds = getStoredCurrAttempts(_currTransaction, _maxTransactionActive);
-
-
- for(int i=0; i<_spouts.size(); i++) {
- String txId = _managedSpoutIds.get(i);
- _coordinators.add(_spouts.get(i).getCoordinator(txId, conf, context));
- }
- }
-
- @Override
- public void close() {
- for(TransactionalState state: _states) {
- state.close();
- }
- }
-
- @Override
- public void nextTuple() {
- sync(Operation.NEXTTUPLE, null);
- }
-
- @Override
- public void ack(Object msgId) {
- sync(Operation.ACK, (TransactionAttempt) msgId);
- }
-
- @Override
- public void fail(Object msgId) {
- sync(Operation.FAIL, (TransactionAttempt) msgId);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- // in the partitioned case, an emitter task may receive a later transaction than it has emitted so far;
- // when it then sees the earlier txid it should know to emit nothing
- declarer.declareStream(BATCH_STREAM_ID, new Fields("tx"));
- declarer.declareStream(COMMIT_STREAM_ID, new Fields("tx"));
- declarer.declareStream(SUCCESS_STREAM_ID, new Fields("tx"));
- }
-
- synchronized private void sync(Operation op, TransactionAttempt attempt) {
- TransactionStatus status;
- long txid;
-
- switch (op) {
- case FAIL:
- // Remove the failed attempt and any attempts whose txid is higher than the failed one.
- // They will be retried on the next call to nextTuple.
- txid = attempt.getTransactionId();
- status = _activeTx.remove(txid);
- if(status!=null && status.attempt.equals(attempt)) {
- _activeTx.tailMap(txid).clear();
- }
- break;
-
- case ACK:
- txid = attempt.getTransactionId();
- status = _activeTx.get(txid);
- if(status!=null && attempt.equals(status.attempt)) {
- if(status.status==AttemptStatus.PROCESSING ) {
- status.status = AttemptStatus.PROCESSED;
- } else if(status.status==AttemptStatus.COMMITTING) {
- status.status = AttemptStatus.COMMITTED;
- }
- }
- break;
-
- case NEXTTUPLE:
- // note that sometimes the tuples active may be less than max_spout_pending, e.g.
- // max_spout_pending = 3
- // tx 1, 2, 3 active, tx 2 is acked. there won't be a commit for tx 2 (because tx 1 isn't committed yet),
- // and there won't be a batch for tx 4 because there's max_spout_pending tx active
- status = _activeTx.get(_currTransaction);
- if (status!=null) {
- if(status.status == AttemptStatus.PROCESSED) {
- status.status = AttemptStatus.COMMITTING;
- _collector.emit(COMMIT_STREAM_ID, new Values(status.attempt), status.attempt);
- } else if (status.status == AttemptStatus.COMMITTED) {
- _activeTx.remove(status.attempt.getTransactionId());
- _attemptIds.remove(status.attempt.getTransactionId());
- _collector.emit(SUCCESS_STREAM_ID, new Values(status.attempt));
- _currTransaction = nextTransactionId(status.attempt.getTransactionId());
- for(TransactionalState state: _states) {
- state.setData(CURRENT_TX, _currTransaction);
- }
- }
- }
-
- if(_active) {
- if(_activeTx.size() < _maxTransactionActive) {
- Long curr = _currTransaction;
- for(int i=0; i<_maxTransactionActive; i++) {
- if(batchDelay()) {
- break;
- }
-
- if(isReady(curr)) {
- if(!_activeTx.containsKey(curr)) {
- // by using a monotonically increasing attempt id, downstream tasks
- // can be memory efficient by clearing out state for old attempts
- // as soon as they see a higher attempt id for a transaction
- Integer attemptId = _attemptIds.get(curr);
- if(attemptId==null) {
- attemptId = 0;
- } else {
- attemptId++;
- }
- _attemptIds.put(curr, attemptId);
- for(TransactionalState state: _states) {
- state.setData(CURRENT_ATTEMPTS, _attemptIds);
- }
-
- TransactionAttempt currAttempt = new TransactionAttempt(curr, attemptId);
- _activeTx.put(curr, new TransactionStatus(currAttempt));
- _collector.emit(BATCH_STREAM_ID, new Values(currAttempt), currAttempt);
- _throttler.markEvent();
- break;
- }
- }
- curr = nextTransactionId(curr);
- }
- } else {
- // Do nothing
- }
- }
- break;
-
- default:
- LOG.warn("Unknown Operation code=" + op);
- break;
- }
- }
-
- private boolean isReady(long txid) {
- //TODO: make this strategy configurable?... right now it goes if anyone is ready
- for(ITridentSpout.BatchCoordinator coord: _coordinators) {
- if(coord.isReady(txid)) return true;
- }
- return false;
- }
-
- private boolean batchDelay() {
- return _throttler.isThrottled();
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Config ret = new Config();
- ret.setMaxTaskParallelism(1);
- ret.registerSerialization(TransactionAttempt.class);
- return ret;
- }
-
- private static enum AttemptStatus {
- PROCESSING,
- PROCESSED,
- COMMITTING,
- COMMITTED
- }
-
- private static class TransactionStatus {
- TransactionAttempt attempt;
- AttemptStatus status;
-
- public TransactionStatus(TransactionAttempt attempt) {
- this.attempt = attempt;
- this.status = AttemptStatus.PROCESSING;
- }
-
- @Override
- public String toString() {
- return attempt.toString() + " <" + status.toString() + ">";
- }
- }
-
-
- private Long nextTransactionId(Long id) {
- return id + 1;
- }
-
- private Long getStoredCurrTransaction() {
- Long ret = INIT_TXID;
- for(TransactionalState state: _states) {
- Long curr = (Long) state.getData(CURRENT_TX);
- if(curr!=null && curr.compareTo(ret) > 0) {
- ret = curr;
- }
- }
- return ret;
- }
-
- private TreeMap<Long, Integer> getStoredCurrAttempts(long currTransaction, int maxBatches) {
- TreeMap<Long, Integer> ret = new TreeMap<Long, Integer>();
- for(TransactionalState state: _states) {
- Map<Object, Number> attempts = (Map) state.getData(CURRENT_ATTEMPTS);
- if(attempts==null) attempts = new HashMap();
- for(Entry<Object, Number> e: attempts.entrySet()) {
- // this is because json doesn't allow numbers as keys...
- // TODO: replace json with a better form of encoding
- Number txidObj;
- if(e.getKey() instanceof String) {
- txidObj = Long.parseLong((String) e.getKey());
- } else {
- txidObj = (Number) e.getKey();
- }
- long txid = ((Number) txidObj).longValue();
- int attemptId = ((Number) e.getValue()).intValue();
- Integer curr = ret.get(txid);
- if(curr==null || attemptId > curr) {
- ret.put(txid, attemptId);
- }
- }
- }
- ret.headMap(currTransaction).clear();
- ret.tailMap(currTransaction + maxBatches - 1).clear();
- return ret;
- }
-}
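
Illustrative sketch, not part of the deleted file above: the coordinator paces batch emission through WindowedTimeThrottler, calling isThrottled() in batchDelay() and markEvent() after each emitted batch. A minimal standalone example of that pattern, with a 500 ms window chosen purely for illustration:

import backtype.storm.utils.WindowedTimeThrottler;

public class BatchEmitThrottleSketch {
    public static void main(String[] args) throws InterruptedException {
        // assume a 500 ms emit interval, standing in for TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS
        WindowedTimeThrottler throttler = new WindowedTimeThrottler(500, 1);
        for (int i = 0; i < 5; i++) {
            if (!throttler.isThrottled()) {
                System.out.println("emit batch " + i);
                throttler.markEvent();      // count one emission in the current window
            } else {
                System.out.println("throttled, skip " + i);
            }
            Thread.sleep(200);              // shorter than the window, so some iterations get throttled
        }
    }
}
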
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/TransactionAttempt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/TransactionAttempt.java b/jstorm-client/src/main/java/storm/trident/topology/TransactionAttempt.java
deleted file mode 100644
index b2ea328..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/TransactionAttempt.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package storm.trident.topology;
-
-import storm.trident.spout.IBatchID;
-
-
-public class TransactionAttempt implements IBatchID {
- Long _txid;
- int _attemptId;
-
-
- // for kryo compatibility
- public TransactionAttempt() {
-
- }
-
- public TransactionAttempt(Long txid, int attemptId) {
- _txid = txid;
- _attemptId = attemptId;
- }
-
- public Long getTransactionId() {
- return _txid;
- }
-
- public Object getId() {
- return _txid;
- }
-
- public int getAttemptId() {
- return _attemptId;
- }
-
- @Override
- public int hashCode() {
- return _txid.hashCode();
- }
-
- @Override
- public boolean equals(Object o) {
- if(!(o instanceof TransactionAttempt)) return false;
- TransactionAttempt other = (TransactionAttempt) o;
- return _txid.equals(other._txid) && _attemptId == other._attemptId;
- }
-
- @Override
- public String toString() {
- return "" + _txid + ":" + _attemptId;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/topology/TridentBoltExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/topology/TridentBoltExecutor.java b/jstorm-client/src/main/java/storm/trident/topology/TridentBoltExecutor.java
deleted file mode 100644
index 71807bb..0000000
--- a/jstorm-client/src/main/java/storm/trident/topology/TridentBoltExecutor.java
+++ /dev/null
@@ -1,430 +0,0 @@
-package storm.trident.topology;
-
-import backtype.storm.Config;
-import backtype.storm.Constants;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.coordination.BatchOutputCollectorImpl;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.task.IOutputCollector;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.ReportedFailedException;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.RotatingMap;
-import backtype.storm.utils.Utils;
-
-import java.io.Serializable;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-
-import storm.trident.spout.IBatchID;
-
-public class TridentBoltExecutor implements IRichBolt {
- public static String COORD_STREAM_PREFIX = "$coord-";
-
- public static String COORD_STREAM(String batch) {
- return COORD_STREAM_PREFIX + batch;
- }
-
- public static class CoordType implements Serializable {
- public boolean singleCount;
-
- protected CoordType(boolean singleCount) {
- this.singleCount = singleCount;
- }
-
- public static CoordType single() {
- return new CoordType(true);
- }
-
- public static CoordType all() {
- return new CoordType(false);
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + (singleCount ? 1231 : 1237);
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- CoordType other = (CoordType) obj;
- if (singleCount != other.singleCount)
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return "<Single: " + singleCount + ">";
- }
-
-
- }
-
- public static class CoordSpec implements Serializable {
- public GlobalStreamId commitStream = null;
- public Map<String, CoordType> coords = new HashMap<String, CoordType>();
-
- public CoordSpec() {
- }
- }
-
- public static class CoordCondition implements Serializable {
- public GlobalStreamId commitStream;
- public int expectedTaskReports;
- Set<Integer> targetTasks;
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
- }
-
- Map<GlobalStreamId, String> _batchGroupIds;
- Map<String, CoordSpec> _coordSpecs;
- Map<String, CoordCondition> _coordConditions;
- ITridentBatchBolt _bolt;
- long _messageTimeoutMs;
- long _lastRotate;
-
- RotatingMap _batches;
-
- // map from batchgroupid to coordspec
- public TridentBoltExecutor(ITridentBatchBolt bolt, Map<GlobalStreamId, String> batchGroupIds, Map<String, CoordSpec> coordinationSpecs) {
- _batchGroupIds = batchGroupIds;
- _coordSpecs = coordinationSpecs;
- _bolt = bolt;
- }
-
- public static class TrackedBatch {
- int attemptId;
- BatchInfo info;
- CoordCondition condition;
- int reportedTasks = 0;
- int expectedTupleCount = 0;
- int receivedTuples = 0;
- Map<Integer, Integer> taskEmittedTuples = new HashMap();
- boolean failed = false;
- boolean receivedCommit;
- Tuple delayedAck = null;
-
- public TrackedBatch(BatchInfo info, CoordCondition condition, int attemptId) {
- this.info = info;
- this.condition = condition;
- this.attemptId = attemptId;
- receivedCommit = condition.commitStream == null;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
- }
-
- public class CoordinatedOutputCollector implements IOutputCollector {
- IOutputCollector _delegate;
-
- TrackedBatch _currBatch = null;
-
- public void setCurrBatch(TrackedBatch batch) {
- _currBatch = batch;
- }
-
- public CoordinatedOutputCollector(IOutputCollector delegate) {
- _delegate = delegate;
- }
-
- public List<Integer> emit(String stream, Collection<Tuple> anchors, List<Object> tuple) {
- List<Integer> tasks = _delegate.emit(stream, anchors, tuple);
- updateTaskCounts(tasks);
- return tasks;
- }
-
- public void emitDirect(int task, String stream, Collection<Tuple> anchors, List<Object> tuple) {
- updateTaskCounts(Arrays.asList(task));
- _delegate.emitDirect(task, stream, anchors, tuple);
- }
-
- public void ack(Tuple tuple) {
- throw new IllegalStateException("Method should never be called");
- }
-
- public void fail(Tuple tuple) {
- throw new IllegalStateException("Method should never be called");
- }
-
- public void reportError(Throwable error) {
- _delegate.reportError(error);
- }
-
-
- private void updateTaskCounts(List<Integer> tasks) {
- if(_currBatch!=null) {
- Map<Integer, Integer> taskEmittedTuples = _currBatch.taskEmittedTuples;
- for(Integer task: tasks) {
- int newCount = Utils.get(taskEmittedTuples, task, 0) + 1;
- taskEmittedTuples.put(task, newCount);
- }
- }
- }
- }
-
- OutputCollector _collector;
- CoordinatedOutputCollector _coordCollector;
- BatchOutputCollector _coordOutputCollector;
- TopologyContext _context;
-
- @Override
- public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
- _messageTimeoutMs = context.maxTopologyMessageTimeout() * 1000L;
- _lastRotate = System.currentTimeMillis();
- _batches = new RotatingMap(2);
- _context = context;
- _collector = collector;
- _coordCollector = new CoordinatedOutputCollector(collector);
- _coordOutputCollector = new BatchOutputCollectorImpl(new OutputCollector(_coordCollector));
-
- _coordConditions = (Map) context.getExecutorData("__coordConditions");
- if(_coordConditions==null) {
- _coordConditions = new HashMap();
- for(String batchGroup: _coordSpecs.keySet()) {
- CoordSpec spec = _coordSpecs.get(batchGroup);
- CoordCondition cond = new CoordCondition();
- cond.commitStream = spec.commitStream;
- cond.expectedTaskReports = 0;
- for(String comp: spec.coords.keySet()) {
- CoordType ct = spec.coords.get(comp);
- if(ct.equals(CoordType.single())) {
- cond.expectedTaskReports+=1;
- } else {
- cond.expectedTaskReports+=context.getComponentTasks(comp).size();
- }
- }
- cond.targetTasks = new HashSet<Integer>();
- for(String component: Utils.get(context.getThisTargets(),
- COORD_STREAM(batchGroup),
- new HashMap<String, Grouping>()).keySet()) {
- cond.targetTasks.addAll(context.getComponentTasks(component));
- }
- _coordConditions.put(batchGroup, cond);
- }
- context.setExecutorData("__coordConditions", _coordConditions);
- }
- _bolt.prepare(conf, context, _coordOutputCollector);
- }
-
- private void failBatch(TrackedBatch tracked, FailedException e) {
- if(e!=null && e instanceof ReportedFailedException) {
- _collector.reportError(e);
- }
- tracked.failed = true;
- if(tracked.delayedAck!=null) {
- _collector.fail(tracked.delayedAck);
- tracked.delayedAck = null;
- }
- }
-
- private void failBatch(TrackedBatch tracked) {
- failBatch(tracked, null);
- }
-
- private boolean finishBatch(TrackedBatch tracked, Tuple finishTuple) {
- boolean success = true;
- try {
- _bolt.finishBatch(tracked.info);
- String stream = COORD_STREAM(tracked.info.batchGroup);
- for(Integer task: tracked.condition.targetTasks) {
- _collector.emitDirect(task, stream, finishTuple, new Values(tracked.info.batchId, Utils.get(tracked.taskEmittedTuples, task, 0)));
- }
- if(tracked.delayedAck!=null) {
- _collector.ack(tracked.delayedAck);
- tracked.delayedAck = null;
- }
- } catch(FailedException e) {
- failBatch(tracked, e);
- success = false;
- }
- _batches.remove(tracked.info.batchId.getId());
- return success;
- }
-
- private void checkFinish(TrackedBatch tracked, Tuple tuple, TupleType type) {
- if(tracked.failed) {
- failBatch(tracked);
- _collector.fail(tuple);
- return;
- }
- CoordCondition cond = tracked.condition;
- boolean delayed = tracked.delayedAck==null &&
- (cond.commitStream!=null && type==TupleType.COMMIT
- || cond.commitStream==null);
- if(delayed) {
- tracked.delayedAck = tuple;
- }
- boolean failed = false;
- if(tracked.receivedCommit && tracked.reportedTasks == cond.expectedTaskReports) {
- if(tracked.receivedTuples == tracked.expectedTupleCount) {
- finishBatch(tracked, tuple);
- } else {
- //TODO: add logging that not all tuples were received
- failBatch(tracked);
- _collector.fail(tuple);
- failed = true;
- }
- }
-
- if(!delayed && !failed) {
- _collector.ack(tuple);
- }
-
- }
-
- @Override
- public void execute(Tuple tuple) {
- if(tuple.getSourceStreamId().equals(Constants.SYSTEM_TICK_STREAM_ID)) {
- long now = System.currentTimeMillis();
- if(now - _lastRotate > _messageTimeoutMs) {
- _batches.rotate();
- _lastRotate = now;
- }
- return;
- }
- String batchGroup = _batchGroupIds.get(tuple.getSourceGlobalStreamid());
- if(batchGroup==null) {
- // this is so we can do things like have simple DRPC that doesn't need to use batch processing
- _coordCollector.setCurrBatch(null);
- _bolt.execute(null, tuple);
- _collector.ack(tuple);
- return;
- }
- IBatchID id = (IBatchID) tuple.getValue(0);
- //get transaction id
-//if it already exists and attempt id is greater than the attempt there
-
-
- TrackedBatch tracked = (TrackedBatch) _batches.get(id.getId());
-// if(_batches.size() > 10 && _context.getThisTaskIndex() == 0) {
-// System.out.println("Received in " + _context.getThisComponentId() + " " + _context.getThisTaskIndex()
-// + " (" + _batches.size() + ")" +
-// "\ntuple: " + tuple +
-// "\nwith tracked " + tracked +
-// "\nwith id " + id +
-// "\nwith group " + batchGroup
-// + "\n");
-//
-// }
- //System.out.println("Num tracked: " + _batches.size() + " " + _context.getThisComponentId() + " " + _context.getThisTaskIndex());
-
- // this code here ensures that only one attempt is ever tracked for a batch, so when
- // failures happen you don't get an explosion in memory usage in the tasks
- if(tracked!=null) {
- if(id.getAttemptId() > tracked.attemptId) {
- _batches.remove(id.getId());
- tracked = null;
- } else if(id.getAttemptId() < tracked.attemptId) {
- // no reason to execute an attempt older than the one we've already seen
- return;
- }
- }
-
- if(tracked==null) {
- tracked = new TrackedBatch(new BatchInfo(batchGroup, id, _bolt.initBatchState(batchGroup, id)), _coordConditions.get(batchGroup), id.getAttemptId());
- _batches.put(id.getId(), tracked);
- }
- _coordCollector.setCurrBatch(tracked);
-
- //System.out.println("TRACKED: " + tracked + " " + tuple);
-
- TupleType t = getTupleType(tuple, tracked);
- if(t==TupleType.COMMIT) {
- tracked.receivedCommit = true;
- checkFinish(tracked, tuple, t);
- } else if(t==TupleType.COORD) {
- int count = tuple.getInteger(1);
- tracked.reportedTasks++;
- tracked.expectedTupleCount+=count;
- checkFinish(tracked, tuple, t);
- } else {
- tracked.receivedTuples++;
- boolean success = true;
- try {
- _bolt.execute(tracked.info, tuple);
- if(tracked.condition.expectedTaskReports==0) {
- success = finishBatch(tracked, tuple);
- }
- } catch(FailedException e) {
- failBatch(tracked, e);
- }
- if(success) {
- _collector.ack(tuple);
- } else {
- _collector.fail(tuple);
- }
- }
- _coordCollector.setCurrBatch(null);
- }
-
- @Override
- public void cleanup() {
- _bolt.cleanup();
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _bolt.declareOutputFields(declarer);
- for(String batchGroup: _coordSpecs.keySet()) {
- declarer.declareStream(COORD_STREAM(batchGroup), true, new Fields("id", "count"));
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Map<String, Object> ret = _bolt.getComponentConfiguration();
- if(ret==null) ret = new HashMap();
- ret.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 5);
- // TODO: Need to be able to set the tick tuple time to the message timeout, ideally without parameterization
- return ret;
- }
-
- private TupleType getTupleType(Tuple tuple, TrackedBatch batch) {
- CoordCondition cond = batch.condition;
- if(cond.commitStream!=null
- && tuple.getSourceGlobalStreamid().equals(cond.commitStream)) {
- return TupleType.COMMIT;
- } else if(cond.expectedTaskReports > 0
- && tuple.getSourceStreamId().startsWith(COORD_STREAM_PREFIX)) {
- return TupleType.COORD;
- } else {
- return TupleType.REGULAR;
- }
- }
-
- static enum TupleType {
- REGULAR,
- COMMIT,
- COORD
- }
-}
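
For reference, a hypothetical minimal ITridentBatchBolt (not part of the deleted sources) that counts tuples per batch. TridentBoltExecutor creates the per-batch state via initBatchState, routes each tuple of the batch to execute, and calls finishBatch once the coordination counts line up. The package names and the public batchId/state fields of BatchInfo are assumptions inferred from the surrounding code:

import java.util.Map;

import backtype.storm.coordination.BatchOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
import storm.trident.topology.BatchInfo;
import storm.trident.topology.ITridentBatchBolt;

public class CountingBatchBolt implements ITridentBatchBolt {
    private BatchOutputCollector _collector;

    @Override
    public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector) {
        _collector = collector;
    }

    @Override
    public Object initBatchState(String batchGroup, Object batchId) {
        return new long[] {0};                       // per-batch counter, carried in BatchInfo.state
    }

    @Override
    public void execute(BatchInfo batchInfo, Tuple tuple) {
        ((long[]) batchInfo.state)[0]++;             // assumes BatchInfo exposes its state object as a public field
    }

    @Override
    public void finishBatch(BatchInfo batchInfo) {
        // emit one summary tuple per batch once the executor decides the batch is complete
        _collector.emit(new Values(batchInfo.batchId, ((long[]) batchInfo.state)[0]));
    }

    @Override
    public void cleanup() {
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("batchId", "count"));
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
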
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/WorkerSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/WorkerSummary.java b/jstorm-client/src/main/java/backtype/storm/generated/WorkerSummary.java
deleted file mode 100644
index 0bf878c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/WorkerSummary.java
+++ /dev/null
@@ -1,560 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class WorkerSummary implements org.apache.thrift7.TBase<WorkerSummary, WorkerSummary._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("WorkerSummary");
-
- private static final org.apache.thrift7.protocol.TField PORT_FIELD_DESC = new org.apache.thrift7.protocol.TField("port", org.apache.thrift7.protocol.TType.I32, (short)1);
- private static final org.apache.thrift7.protocol.TField TOPOLOGY_FIELD_DESC = new org.apache.thrift7.protocol.TField("topology", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField TASKS_FIELD_DESC = new org.apache.thrift7.protocol.TField("tasks", org.apache.thrift7.protocol.TType.LIST, (short)3);
-
- private int port; // required
- private String topology; // required
- private List<TaskSummary> tasks; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- PORT((short)1, "port"),
- TOPOLOGY((short)2, "topology"),
- TASKS((short)3, "tasks");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // PORT
- return PORT;
- case 2: // TOPOLOGY
- return TOPOLOGY;
- case 3: // TASKS
- return TASKS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __PORT_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.PORT, new org.apache.thrift7.meta_data.FieldMetaData("port", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.TOPOLOGY, new org.apache.thrift7.meta_data.FieldMetaData("topology", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.TASKS, new org.apache.thrift7.meta_data.FieldMetaData("tasks", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, TaskSummary.class))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(WorkerSummary.class, metaDataMap);
- }
-
- public WorkerSummary() {
- }
-
- public WorkerSummary(
- int port,
- String topology,
- List<TaskSummary> tasks)
- {
- this();
- this.port = port;
- set_port_isSet(true);
- this.topology = topology;
- this.tasks = tasks;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public WorkerSummary(WorkerSummary other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.port = other.port;
- if (other.is_set_topology()) {
- this.topology = other.topology;
- }
- if (other.is_set_tasks()) {
- List<TaskSummary> __this__tasks = new ArrayList<TaskSummary>();
- for (TaskSummary other_element : other.tasks) {
- __this__tasks.add(new TaskSummary(other_element));
- }
- this.tasks = __this__tasks;
- }
- }
-
- public WorkerSummary deepCopy() {
- return new WorkerSummary(this);
- }
-
- @Override
- public void clear() {
- set_port_isSet(false);
- this.port = 0;
- this.topology = null;
- this.tasks = null;
- }
-
- public int get_port() {
- return this.port;
- }
-
- public void set_port(int port) {
- this.port = port;
- set_port_isSet(true);
- }
-
- public void unset_port() {
- __isset_bit_vector.clear(__PORT_ISSET_ID);
- }
-
- /** Returns true if field port is set (has been assigned a value) and false otherwise */
- public boolean is_set_port() {
- return __isset_bit_vector.get(__PORT_ISSET_ID);
- }
-
- public void set_port_isSet(boolean value) {
- __isset_bit_vector.set(__PORT_ISSET_ID, value);
- }
-
- public String get_topology() {
- return this.topology;
- }
-
- public void set_topology(String topology) {
- this.topology = topology;
- }
-
- public void unset_topology() {
- this.topology = null;
- }
-
- /** Returns true if field topology is set (has been assigned a value) and false otherwise */
- public boolean is_set_topology() {
- return this.topology != null;
- }
-
- public void set_topology_isSet(boolean value) {
- if (!value) {
- this.topology = null;
- }
- }
-
- public int get_tasks_size() {
- return (this.tasks == null) ? 0 : this.tasks.size();
- }
-
- public java.util.Iterator<TaskSummary> get_tasks_iterator() {
- return (this.tasks == null) ? null : this.tasks.iterator();
- }
-
- public void add_to_tasks(TaskSummary elem) {
- if (this.tasks == null) {
- this.tasks = new ArrayList<TaskSummary>();
- }
- this.tasks.add(elem);
- }
-
- public List<TaskSummary> get_tasks() {
- return this.tasks;
- }
-
- public void set_tasks(List<TaskSummary> tasks) {
- this.tasks = tasks;
- }
-
- public void unset_tasks() {
- this.tasks = null;
- }
-
- /** Returns true if field tasks is set (has been assigned a value) and false otherwise */
- public boolean is_set_tasks() {
- return this.tasks != null;
- }
-
- public void set_tasks_isSet(boolean value) {
- if (!value) {
- this.tasks = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case PORT:
- if (value == null) {
- unset_port();
- } else {
- set_port((Integer)value);
- }
- break;
-
- case TOPOLOGY:
- if (value == null) {
- unset_topology();
- } else {
- set_topology((String)value);
- }
- break;
-
- case TASKS:
- if (value == null) {
- unset_tasks();
- } else {
- set_tasks((List<TaskSummary>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case PORT:
- return Integer.valueOf(get_port());
-
- case TOPOLOGY:
- return get_topology();
-
- case TASKS:
- return get_tasks();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case PORT:
- return is_set_port();
- case TOPOLOGY:
- return is_set_topology();
- case TASKS:
- return is_set_tasks();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof WorkerSummary)
- return this.equals((WorkerSummary)that);
- return false;
- }
-
- public boolean equals(WorkerSummary that) {
- if (that == null)
- return false;
-
- boolean this_present_port = true;
- boolean that_present_port = true;
- if (this_present_port || that_present_port) {
- if (!(this_present_port && that_present_port))
- return false;
- if (this.port != that.port)
- return false;
- }
-
- boolean this_present_topology = true && this.is_set_topology();
- boolean that_present_topology = true && that.is_set_topology();
- if (this_present_topology || that_present_topology) {
- if (!(this_present_topology && that_present_topology))
- return false;
- if (!this.topology.equals(that.topology))
- return false;
- }
-
- boolean this_present_tasks = true && this.is_set_tasks();
- boolean that_present_tasks = true && that.is_set_tasks();
- if (this_present_tasks || that_present_tasks) {
- if (!(this_present_tasks && that_present_tasks))
- return false;
- if (!this.tasks.equals(that.tasks))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_port = true;
- builder.append(present_port);
- if (present_port)
- builder.append(port);
-
- boolean present_topology = true && (is_set_topology());
- builder.append(present_topology);
- if (present_topology)
- builder.append(topology);
-
- boolean present_tasks = true && (is_set_tasks());
- builder.append(present_tasks);
- if (present_tasks)
- builder.append(tasks);
-
- return builder.toHashCode();
- }
-
- public int compareTo(WorkerSummary other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- WorkerSummary typedOther = (WorkerSummary)other;
-
- lastComparison = Boolean.valueOf(is_set_port()).compareTo(typedOther.is_set_port());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_port()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.port, typedOther.port);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_topology()).compareTo(typedOther.is_set_topology());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_topology()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.topology, typedOther.topology);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_tasks()).compareTo(typedOther.is_set_tasks());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_tasks()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.tasks, typedOther.tasks);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // PORT
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.port = iprot.readI32();
- set_port_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // TOPOLOGY
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.topology = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // TASKS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list189 = iprot.readListBegin();
- this.tasks = new ArrayList<TaskSummary>(_list189.size);
- for (int _i190 = 0; _i190 < _list189.size; ++_i190)
- {
- TaskSummary _elem191; // required
- _elem191 = new TaskSummary();
- _elem191.read(iprot);
- this.tasks.add(_elem191);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldBegin(PORT_FIELD_DESC);
- oprot.writeI32(this.port);
- oprot.writeFieldEnd();
- if (this.topology != null) {
- oprot.writeFieldBegin(TOPOLOGY_FIELD_DESC);
- oprot.writeString(this.topology);
- oprot.writeFieldEnd();
- }
- if (this.tasks != null) {
- oprot.writeFieldBegin(TASKS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.tasks.size()));
- for (TaskSummary _iter192 : this.tasks)
- {
- _iter192.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("WorkerSummary(");
- boolean first = true;
-
- sb.append("port:");
- sb.append(this.port);
- first = false;
- if (!first) sb.append(", ");
- sb.append("topology:");
- if (this.topology == null) {
- sb.append("null");
- } else {
- sb.append(this.topology);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("tasks:");
- if (this.tasks == null) {
- sb.append("null");
- } else {
- sb.append(this.tasks);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_port()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'port' is unset! Struct:" + toString());
- }
-
- if (!is_set_topology()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'topology' is unset! Struct:" + toString());
- }
-
- if (!is_set_tasks()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'tasks' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java b/jstorm-client/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
deleted file mode 100644
index 15e37a8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/grouping/CustomStreamGrouping.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package backtype.storm.grouping;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.task.WorkerTopologyContext;
-import java.io.Serializable;
-import java.util.List;
-
-public interface CustomStreamGrouping extends Serializable {
-
- /**
- * Tells the stream grouping at runtime the tasks in the target bolt. This
- * information should be used in chooseTasks to determine the target tasks.
- *
- * It also tells the grouping the metadata on the stream this grouping will
- * be used on.
- */
- void prepare(WorkerTopologyContext context, GlobalStreamId stream,
- List<Integer> targetTasks);
-
- /**
- * This function implements the custom stream grouping. Given the values of a
- * tuple, it returns the subset of the target tasks (provided in prepare) that
- * the tuple should be sent to.
- *
- * @param values
- * the values to group on
- */
- List<Integer> chooseTasks(int taskId, List<Object> values);
-}
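
A hypothetical CustomStreamGrouping (not in the original tree) illustrating the contract documented above: prepare records the target task ids, and chooseTasks picks one of them per tuple. The hashing scheme is purely illustrative:

import java.util.Arrays;
import java.util.List;

import backtype.storm.generated.GlobalStreamId;
import backtype.storm.grouping.CustomStreamGrouping;
import backtype.storm.task.WorkerTopologyContext;

public class HashFirstFieldGrouping implements CustomStreamGrouping {
    private List<Integer> _targetTasks;

    @Override
    public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
        _targetTasks = targetTasks;                  // remember the task ids this grouping may route to
    }

    @Override
    public List<Integer> chooseTasks(int taskId, List<Object> values) {
        // route by the hash of the first value; assumes the first value is non-null
        int idx = (values.get(0).hashCode() & Integer.MAX_VALUE) % _targetTasks.size();
        return Arrays.asList(_targetTasks.get(idx));
    }
}
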
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/BaseTaskHook.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/BaseTaskHook.java b/jstorm-client/src/main/java/backtype/storm/hooks/BaseTaskHook.java
deleted file mode 100644
index a2aac33..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/BaseTaskHook.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package backtype.storm.hooks;
-
-import backtype.storm.hooks.info.BoltAckInfo;
-import backtype.storm.hooks.info.BoltExecuteInfo;
-import backtype.storm.hooks.info.BoltFailInfo;
-import backtype.storm.hooks.info.EmitInfo;
-import backtype.storm.hooks.info.SpoutAckInfo;
-import backtype.storm.hooks.info.SpoutFailInfo;
-import backtype.storm.task.TopologyContext;
-import java.util.Map;
-
-public class BaseTaskHook implements ITaskHook {
- @Override
- public void prepare(Map conf, TopologyContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-
- @Override
- public void emit(EmitInfo info) {
- }
-
- @Override
- public void spoutAck(SpoutAckInfo info) {
- }
-
- @Override
- public void spoutFail(SpoutFailInfo info) {
- }
-
- @Override
- public void boltAck(BoltAckInfo info) {
- }
-
- @Override
- public void boltFail(BoltFailInfo info) {
- }
-
- @Override
- public void boltExecute(BoltExecuteInfo info) {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/ITaskHook.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/ITaskHook.java b/jstorm-client/src/main/java/backtype/storm/hooks/ITaskHook.java
deleted file mode 100644
index f705f12..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/ITaskHook.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package backtype.storm.hooks;
-
-import backtype.storm.hooks.info.BoltAckInfo;
-import backtype.storm.hooks.info.BoltExecuteInfo;
-import backtype.storm.hooks.info.SpoutFailInfo;
-import backtype.storm.hooks.info.SpoutAckInfo;
-import backtype.storm.hooks.info.EmitInfo;
-import backtype.storm.hooks.info.BoltFailInfo;
-import backtype.storm.task.TopologyContext;
-import java.util.Map;
-
-public interface ITaskHook {
- void prepare(Map conf, TopologyContext context);
-
- void cleanup();
-
- void emit(EmitInfo info);
-
- void spoutAck(SpoutAckInfo info);
-
- void spoutFail(SpoutFailInfo info);
-
- void boltExecute(BoltExecuteInfo info);
-
- void boltAck(BoltAckInfo info);
-
- void boltFail(BoltFailInfo info);
-}
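
A hypothetical task hook, not part of the deleted sources: extending BaseTaskHook (shown above) lets an implementation override only the callbacks it needs, since BaseTaskHook supplies no-ops for the rest. How such a hook is registered is outside this diff:

import backtype.storm.hooks.BaseTaskHook;
import backtype.storm.hooks.info.EmitInfo;

public class EmitLoggingHook extends BaseTaskHook {
    @Override
    public void emit(EmitInfo info) {
        // log every emit seen by the task this hook is attached to
        System.out.println("task " + info.taskId + " emitted " + info.values + " on stream " + info.stream);
    }
}
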
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java b/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
deleted file mode 100644
index b0f0a9b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltAckInfo.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.hooks.info;
-
-import backtype.storm.tuple.Tuple;
-
-public class BoltAckInfo {
- public Tuple tuple;
- public int ackingTaskId;
- public Long processLatencyMs; // null if it wasn't sampled
-
- public BoltAckInfo(Tuple tuple, int ackingTaskId, Long processLatencyMs) {
- this.tuple = tuple;
- this.ackingTaskId = ackingTaskId;
- this.processLatencyMs = processLatencyMs;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java b/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
deleted file mode 100644
index 31ca373..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltExecuteInfo.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package backtype.storm.hooks.info;
-
-import backtype.storm.tuple.Tuple;
-
-public class BoltExecuteInfo {
- public Tuple tuple;
- public int executingTaskId;
- public Long executeLatencyMs; // null if it wasn't sampled
-
- public BoltExecuteInfo(Tuple tuple, int executingTaskId,
- Long executeLatencyMs) {
- this.tuple = tuple;
- this.executingTaskId = executingTaskId;
- this.executeLatencyMs = executeLatencyMs;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java b/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
deleted file mode 100644
index 3a3dfec..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/info/BoltFailInfo.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.hooks.info;
-
-import backtype.storm.tuple.Tuple;
-
-public class BoltFailInfo {
- public Tuple tuple;
- public int failingTaskId;
- public Long failLatencyMs; // null if it wasn't sampled
-
- public BoltFailInfo(Tuple tuple, int failingTaskId, Long failLatencyMs) {
- this.tuple = tuple;
- this.failingTaskId = failingTaskId;
- this.failLatencyMs = failLatencyMs;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/info/EmitInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/info/EmitInfo.java b/jstorm-client/src/main/java/backtype/storm/hooks/info/EmitInfo.java
deleted file mode 100644
index 39b9688..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/info/EmitInfo.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package backtype.storm.hooks.info;
-
-import java.util.Collection;
-import java.util.List;
-
-public class EmitInfo {
- public List<Object> values;
- public String stream;
- public int taskId;
- public Collection<Integer> outTasks;
-
- public EmitInfo(List<Object> values, String stream, int taskId,
- Collection<Integer> outTasks) {
- this.values = values;
- this.stream = stream;
- this.taskId = taskId;
- this.outTasks = outTasks;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java b/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
deleted file mode 100644
index f74efae..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutAckInfo.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.hooks.info;
-
-public class SpoutAckInfo {
- public Object messageId;
- public int spoutTaskId;
- public Long completeLatencyMs; // null if it wasn't sampled
-
- public SpoutAckInfo(Object messageId, int spoutTaskId,
- Long completeLatencyMs) {
- this.messageId = messageId;
- this.spoutTaskId = spoutTaskId;
- this.completeLatencyMs = completeLatencyMs;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java b/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
deleted file mode 100644
index 8052b4a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/hooks/info/SpoutFailInfo.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package backtype.storm.hooks.info;
-
-public class SpoutFailInfo {
- public Object messageId;
- public int spoutTaskId;
- public Long failLatencyMs; // null if it wasn't sampled
-
- public SpoutFailInfo(Object messageId, int spoutTaskId, Long failLatencyMs) {
- this.messageId = messageId;
- this.spoutTaskId = spoutTaskId;
- this.failLatencyMs = failLatencyMs;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/messaging/IConnection.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/messaging/IConnection.java b/jstorm-client/src/main/java/backtype/storm/messaging/IConnection.java
deleted file mode 100644
index f61e818..0000000
--- a/jstorm-client/src/main/java/backtype/storm/messaging/IConnection.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package backtype.storm.messaging;
-
-import java.util.List;
-
-import backtype.storm.utils.DisruptorQueue;
-
-public interface IConnection {
-
- /**
- * (flags != 1) receive synchronously, blocking until a message is available
- * (flags == 1) receive asynchronously, returning immediately
- *
- * @param flags receive mode, as described above
- * @return the received TaskMessage, if any
- */
- public TaskMessage recv(int flags);
-
- /**
- * In the new design, the receive flow registers a queue via registerQueue,
- * and received messages are then pushed into that queue.
- *
- * @param recvQueue the queue that received messages are pushed into
- */
- public void registerQueue(DisruptorQueue recvQueue);
- public void enqueue(TaskMessage message);
-
- public void send(List<TaskMessage> messages);
- public void send(TaskMessage message);
-
- /**
- * close this connection
- */
- public void close();
-
- public boolean isClosed();
-}
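
A small sketch (not from the original file) of the two receive styles described in the javadoc above, assuming conn is an IConnection obtained from a messaging context:

import backtype.storm.messaging.IConnection;
import backtype.storm.messaging.TaskMessage;

public class RecvStyles {
    static TaskMessage pullBlocking(IConnection conn) {
        return conn.recv(0);    // flags != 1: synchronous, waits until a message is available
    }

    static TaskMessage pollOnce(IConnection conn) {
        return conn.recv(1);    // flags == 1: asynchronous, returns immediately even if nothing arrived
    }
}
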
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/messaging/IContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/messaging/IContext.java b/jstorm-client/src/main/java/backtype/storm/messaging/IContext.java
deleted file mode 100644
index 760b6e5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/messaging/IContext.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package backtype.storm.messaging;
-
-import java.util.Map;
-
-import backtype.storm.utils.DisruptorQueue;
-
-/**
- * This interface needs to be implemented by a messaging plugin.
- *
- * The messaging plugin is specified via the Storm config parameter
- * storm.messaging.transport.
- *
- * A messaging plugin should have a default constructor and implement the IContext
- * interface. Upon construction, IContext::prepare(storm_conf) is invoked so the
- * context can be configured according to the storm configuration.
- */
-public interface IContext {
- /**
- * This method is invoked at the startup of messaging plugin
- *
- * @param storm_conf
- * storm configuration
- */
- public void prepare(Map storm_conf);
-
- /**
- * This method is invoked when a worker unloads the messaging plugin
- */
- public void term();
-
- /**
- * This method establishes a server side connection
- *
- * @param topology_id
- * topology ID
- * @param port
- * port #
- * @return server side connection
- */
- public IConnection bind(String topology_id, int port);
-
- /**
- * This method establishes a client side connection to a remote server
- *
- * @param topology_id
- * topology ID
- * @param host
- * remote host
- * @param port
- * remote port
- * @return client side connection
- */
- public IConnection connect(String topology_id, String host, int port);
-};
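
A hypothetical skeleton of a messaging plugin (not part of the deleted sources) illustrating the contract from the javadoc: a no-arg constructor plus prepare(storm_conf), selected via storm.messaging.transport. The bind/connect bodies are deliberately left unimplemented in this sketch:

import java.util.Map;

import backtype.storm.messaging.IConnection;
import backtype.storm.messaging.IContext;

public class MyTransportPlugin implements IContext {
    private Map _stormConf;

    public MyTransportPlugin() {                     // no-arg constructor required by TransportFactory
    }

    @Override
    public void prepare(Map storm_conf) {
        _stormConf = storm_conf;                     // read any plugin-specific settings here
    }

    @Override
    public void term() {
        // release sockets/threads owned by this context
    }

    @Override
    public IConnection bind(String topology_id, int port) {
        throw new UnsupportedOperationException("server-side connection not implemented in this sketch");
    }

    @Override
    public IConnection connect(String topology_id, String host, int port) {
        throw new UnsupportedOperationException("client-side connection not implemented in this sketch");
    }
}
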
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/messaging/TaskMessage.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/messaging/TaskMessage.java b/jstorm-client/src/main/java/backtype/storm/messaging/TaskMessage.java
deleted file mode 100644
index cab968f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/messaging/TaskMessage.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package backtype.storm.messaging;
-
-import java.nio.ByteBuffer;
-
-public class TaskMessage {
- private int _task;
- private byte[] _message;
-
- public TaskMessage(int task, byte[] message) {
- _task = task;
- _message = message;
- }
-
- public int task() {
- return _task;
- }
-
- public byte[] message() {
- return _message;
- }
-
- public static boolean isEmpty(TaskMessage message) {
- if (message == null) {
- return true;
- }else if (message.message() == null) {
- return true;
- }else if (message.message().length == 0) {
- return true;
- }
-
- return false;
- }
-
- @Deprecated
- public ByteBuffer serialize() {
- ByteBuffer bb = ByteBuffer.allocate(_message.length + 2);
- bb.putShort((short) _task);
- bb.put(_message);
- return bb;
- }
-
- @Deprecated
- public void deserialize(ByteBuffer packet) {
- if (packet == null)
- return;
- _task = packet.getShort();
- _message = new byte[packet.limit() - 2];
- packet.get(_message);
- }
-
-}
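
A round-trip sketch (not in the original file) of the deprecated wire format above, a 2-byte task id followed by the payload bytes. The flip() call is needed because serialize() leaves the buffer positioned at the end of what it wrote:

import java.nio.ByteBuffer;

import backtype.storm.messaging.TaskMessage;

public class TaskMessageRoundTrip {
    public static void main(String[] args) {
        TaskMessage out = new TaskMessage(7, "hello".getBytes());
        ByteBuffer packet = out.serialize();
        packet.flip();                               // rewind to the start before reading

        TaskMessage in = new TaskMessage(0, null);
        in.deserialize(packet);
        System.out.println(in.task() + " -> " + new String(in.message()));
    }
}
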
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/messaging/TransportFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/messaging/TransportFactory.java b/jstorm-client/src/main/java/backtype/storm/messaging/TransportFactory.java
deleted file mode 100644
index 8830496..0000000
--- a/jstorm-client/src/main/java/backtype/storm/messaging/TransportFactory.java
+++ /dev/null
@@ -1,54 +0,0 @@
-package backtype.storm.messaging;
-
-import java.lang.reflect.Constructor;
-import java.lang.reflect.Method;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-
-public class TransportFactory {
- public static final Logger LOG = Logger.getLogger(TransportFactory.class);
-
- public static IContext makeContext(Map storm_conf) {
-
- // get factory class name
- String transport_plugin_klassName = (String) storm_conf
- .get(Config.STORM_MESSAGING_TRANSPORT);
- LOG.info("JStorm peer transport plugin:" + transport_plugin_klassName);
-
- IContext transport = null;
- try {
- // create a factory class
- Class klass = Class.forName(transport_plugin_klassName);
- // obtain a context object
- // Object obj = klass.newInstance();
- Constructor constructor = klass.getDeclaredConstructor();
- constructor.setAccessible(true);
- Object obj = constructor.newInstance();
- LOG.info("TransportFactory makeContext: new klass: " + obj);
- if (obj instanceof IContext) {
- // case 1: plugin is a IContext class
- transport = (IContext) obj;
- // initialize with storm configuration
- transport.prepare(storm_conf);
- LOG.info("TransportFactory makeContext: start prepare... "
- + storm_conf);
- } else {
- // case 2: Non-IContext plugin must have a
- // makeContext(storm_conf) method that returns IContext object
- Method method = klass.getMethod("makeContext", Map.class);
- LOG.debug("object:" + obj + " method:" + method);
- transport = (IContext) method.invoke(obj, storm_conf);
- }
- LOG.info("TransportFactory makeContext done...");
- } catch (Exception e) {
- throw new RuntimeException(
- "Failed to construct messaging plugin from plugin class "
- + transport_plugin_klassName, e);
- }
- return transport;
- }
-
-}
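
A hypothetical usage sketch for TransportFactory (not part of the deleted sources): set storm.messaging.transport to a plugin class name and call makeContext. The netty Context class name below is an assumption made for illustration; any class satisfying the plugin contract works:

import java.util.HashMap;
import java.util.Map;

import backtype.storm.Config;
import backtype.storm.messaging.IContext;
import backtype.storm.messaging.TransportFactory;

public class TransportFactoryUsage {
    public static void main(String[] args) {
        Map conf = new HashMap();
        // assumed plugin class name, configured under storm.messaging.transport
        conf.put(Config.STORM_MESSAGING_TRANSPORT, "backtype.storm.messaging.netty.Context");
        IContext context = TransportFactory.makeContext(conf);
        // a worker would now call context.bind(...) / context.connect(...)
        context.term();
    }
}
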
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/LoggingMetricsConsumer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/LoggingMetricsConsumer.java b/jstorm-client/src/main/java/backtype/storm/metric/LoggingMetricsConsumer.java
deleted file mode 100644
index 19c2235..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/LoggingMetricsConsumer.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package backtype.storm.metric;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Collection;
-import java.util.Map;
-
-import backtype.storm.metric.api.IMetricsConsumer;
-import backtype.storm.task.IErrorReporter;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.utils.Utils;
-
-/*
- * Listens for all metrics, dumps them to log
- *
- * To use, add this to your topology's configuration:
- * conf.registerMetricsConsumer(backtype.storm.metric.LoggingMetricsConsumer.class, 1);
- *
- * Or edit the storm.yaml config file:
- *
- * topology.metrics.consumer.register:
- * - class: "backtype.storm.metric.LoggingMetricsConsumer"
- * parallelism.hint: 1
- *
- */
-public class LoggingMetricsConsumer implements IMetricsConsumer {
- public static final Logger LOG = LoggerFactory
- .getLogger(LoggingMetricsConsumer.class);
-
- @Override
- public void prepare(Map stormConf, Object registrationArgument,
- TopologyContext context, IErrorReporter errorReporter) {
- }
-
- static private String padding = " ";
-
- @Override
- public void handleDataPoints(TaskInfo taskInfo,
- Collection<DataPoint> dataPoints) {
- StringBuilder sb = new StringBuilder();
- String header = String.format("%d\t%15s:%-4d\t%3d:%-11s\t",
- taskInfo.timestamp, taskInfo.srcWorkerHost,
- taskInfo.srcWorkerPort, taskInfo.srcTaskId,
- taskInfo.srcComponentId);
- sb.append(header);
- for (DataPoint p : dataPoints) {
- sb.delete(header.length(), sb.length());
- sb.append(p.name).append(padding)
- .delete(header.length() + 23, sb.length()).append("\t")
- .append(p.value);
- LOG.info(sb.toString());
- }
- }
-
- @Override
- public void cleanup() {
- }
-}
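
A short sketch (not in the original file) of the programmatic registration path mentioned in the comment above:

import backtype.storm.Config;
import backtype.storm.metric.LoggingMetricsConsumer;

public class RegisterLoggingConsumer {
    public static void main(String[] args) {
        Config conf = new Config();
        conf.registerMetricsConsumer(LoggingMetricsConsumer.class, 1);
        // submit the topology with this conf; the consumer then logs the sampled data points
    }
}
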
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/MetricsConsumerBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/MetricsConsumerBolt.java b/jstorm-client/src/main/java/backtype/storm/metric/MetricsConsumerBolt.java
deleted file mode 100644
index 994cb56..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/MetricsConsumerBolt.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package backtype.storm.metric;
-
-import backtype.storm.Config;
-import backtype.storm.metric.api.IMetricsConsumer;
-import backtype.storm.task.IBolt;
-import backtype.storm.task.IErrorReporter;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import java.util.Collection;
-import java.util.Map;
-
-public class MetricsConsumerBolt implements IBolt {
- IMetricsConsumer _metricsConsumer;
- String _consumerClassName;
- OutputCollector _collector;
- Object _registrationArgument;
-
- public MetricsConsumerBolt(String consumerClassName,
- Object registrationArgument) {
- _consumerClassName = consumerClassName;
- _registrationArgument = registrationArgument;
- }
-
- @Override
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- try {
- _metricsConsumer = (IMetricsConsumer) Class.forName(
- _consumerClassName).newInstance();
- } catch (Exception e) {
- throw new RuntimeException(
- "Could not instantiate a class listed in config under section "
- + Config.TOPOLOGY_METRICS_CONSUMER_REGISTER
- + " with fully qualified name "
- + _consumerClassName, e);
- }
- _metricsConsumer.prepare(stormConf, _registrationArgument, context,
- (IErrorReporter) collector);
- _collector = collector;
- }
-
- @Override
- public void execute(Tuple input) {
- _metricsConsumer.handleDataPoints(
- (IMetricsConsumer.TaskInfo) input.getValue(0),
- (Collection) input.getValue(1));
- _collector.ack(input);
- }
-
- @Override
- public void cleanup() {
- _metricsConsumer.cleanup();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/SystemBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/SystemBolt.java b/jstorm-client/src/main/java/backtype/storm/metric/SystemBolt.java
deleted file mode 100644
index 07bdc28..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/SystemBolt.java
+++ /dev/null
@@ -1,157 +0,0 @@
-package backtype.storm.metric;
-
-import backtype.storm.Config;
-import backtype.storm.metric.api.AssignableMetric;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.task.IBolt;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import clojure.lang.AFn;
-import clojure.lang.IFn;
-import clojure.lang.RT;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.management.*;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-// There is one task inside one executor for each worker of the topology.
- // TaskID is always -1, therefore you can only send unanchored tuples to the co-located SystemBolt.
-// This bolt was conceived to export worker stats via metrics api.
-public class SystemBolt implements IBolt {
- private static Logger LOG = LoggerFactory.getLogger(SystemBolt.class);
- private static boolean _prepareWasCalled = false;
-
- private static class MemoryUsageMetric implements IMetric {
- IFn _getUsage;
-
- public MemoryUsageMetric(IFn getUsage) {
- _getUsage = getUsage;
- }
-
- @Override
- public Object getValueAndReset() {
- MemoryUsage memUsage;
- try {
- memUsage = (MemoryUsage) _getUsage.invoke();
- } catch (Exception e) {
- LOG.error("Failed to get userage ", e);
- throw new RuntimeException(e);
- }
- HashMap m = new HashMap();
- m.put("maxBytes", memUsage.getMax());
- m.put("committedBytes", memUsage.getCommitted());
- m.put("initBytes", memUsage.getInit());
- m.put("usedBytes", memUsage.getUsed());
- m.put("virtualFreeBytes", memUsage.getMax() - memUsage.getUsed());
- m.put("unusedBytes", memUsage.getCommitted() - memUsage.getUsed());
- return m;
- }
- }
-
- // Metrics data for counts is canonically exported in time buckets.
- // Convert the absolute GC counters into per-bucket deltas here.
- private static class GarbageCollectorMetric implements IMetric {
- GarbageCollectorMXBean _gcBean;
- Long _collectionCount;
- Long _collectionTime;
-
- public GarbageCollectorMetric(GarbageCollectorMXBean gcBean) {
- _gcBean = gcBean;
- }
-
- @Override
- public Object getValueAndReset() {
- Long collectionCountP = _gcBean.getCollectionCount();
- Long collectionTimeP = _gcBean.getCollectionTime();
-
- Map ret = null;
- if (_collectionCount != null && _collectionTime != null) {
- ret = new HashMap();
- ret.put("count", collectionCountP - _collectionCount);
- ret.put("timeMs", collectionTimeP - _collectionTime);
- }
-
- _collectionCount = collectionCountP;
- _collectionTime = collectionTimeP;
- return ret;
- }
- }
-
- @Override
- public void prepare(final Map stormConf, TopologyContext context,
- OutputCollector collector) {
- if (_prepareWasCalled
- && !"local".equals(stormConf.get(Config.STORM_CLUSTER_MODE))) {
- throw new RuntimeException(
- "A single worker should have 1 SystemBolt instance.");
- }
- _prepareWasCalled = true;
-
- int bucketSize = RT.intCast(stormConf
- .get(Config.TOPOLOGY_BUILTIN_METRICS_BUCKET_SIZE_SECS));
-
- final RuntimeMXBean jvmRT = ManagementFactory.getRuntimeMXBean();
-
- context.registerMetric("uptimeSecs", new IMetric() {
- @Override
- public Object getValueAndReset() {
- return jvmRT.getUptime() / 1000.0;
- }
- }, bucketSize);
-
- context.registerMetric("startTimeSecs", new IMetric() {
- @Override
- public Object getValueAndReset() {
- return jvmRT.getStartTime() / 1000.0;
- }
- }, bucketSize);
-
- context.registerMetric("newWorkerEvent", new IMetric() {
- boolean doEvent = true;
-
- @Override
- public Object getValueAndReset() {
- if (doEvent) {
- doEvent = false;
- return 1;
- } else
- return 0;
- }
- }, bucketSize);
-
- final MemoryMXBean jvmMemRT = ManagementFactory.getMemoryMXBean();
-
- context.registerMetric("memory/heap", new MemoryUsageMetric(new AFn() {
- public Object invoke() {
- return jvmMemRT.getHeapMemoryUsage();
- }
- }), bucketSize);
- context.registerMetric("memory/nonHeap", new MemoryUsageMetric(
- new AFn() {
- public Object invoke() {
- return jvmMemRT.getNonHeapMemoryUsage();
- }
- }), bucketSize);
-
- for (GarbageCollectorMXBean b : ManagementFactory
- .getGarbageCollectorMXBeans()) {
- context.registerMetric("GC/" + b.getName().replaceAll("\\W", ""),
- new GarbageCollectorMetric(b), bucketSize);
- }
- }
-
- @Override
- public void execute(Tuple input) {
- throw new RuntimeException(
- "Non-system tuples should never be sent to __system bolt.");
- }
-
- @Override
- public void cleanup() {
- }
-}
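
As a sketch of the same registration pattern SystemBolt uses above, a user bolt can register its own IMetric in prepare(); the bucket size controls how often getValueAndReset() is polled. The class and metric names below are illustrative, not part of this codebase:

import java.util.Map;

import backtype.storm.metric.api.CountMetric;
import backtype.storm.task.IBolt;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.tuple.Tuple;

public class CountingBolt implements IBolt {
    private transient CountMetric processed;
    private OutputCollector collector;

    @Override
    public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
        this.collector = collector;
        processed = new CountMetric();
        // Poll and reset this metric every 60 seconds.
        context.registerMetric("processed_tuples", processed, 60);
    }

    @Override
    public void execute(Tuple input) {
        processed.incr();
        collector.ack(input);
    }

    @Override
    public void cleanup() {
    }
}
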
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/AssignableMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/AssignableMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/AssignableMetric.java
deleted file mode 100644
index ed6dc72..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/AssignableMetric.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package backtype.storm.metric.api;
-
-public class AssignableMetric implements IMetric {
- Object _value;
-
- public AssignableMetric(Object value) {
- _value = value;
- }
-
- public void setValue(Object value) {
- _value = value;
- }
-
- public Object getValueAndReset() {
- return _value;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/CombinedMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/CombinedMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/CombinedMetric.java
deleted file mode 100644
index cf74184..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/CombinedMetric.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package backtype.storm.metric.api;
-
-public class CombinedMetric implements IMetric {
- private final ICombiner _combiner;
- private Object _value;
-
- public CombinedMetric(ICombiner combiner) {
- _combiner = combiner;
- _value = _combiner.identity();
- }
-
- public void update(Object value) {
- _value = _combiner.combine(_value, value);
- }
-
- public Object getValueAndReset() {
- Object ret = _value;
- _value = _combiner.identity();
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/CountMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/CountMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/CountMetric.java
deleted file mode 100644
index 12694cd..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/CountMetric.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package backtype.storm.metric.api;
-
-import backtype.storm.metric.api.IMetric;
-
-public class CountMetric implements IMetric {
- long _value = 0;
-
- public CountMetric() {
- }
-
- public void incr() {
- _value++;
- }
-
- public void incrBy(long incrementBy) {
- _value += incrementBy;
- }
-
- public Object getValueAndReset() {
- long ret = _value;
- _value = 0;
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/ICombiner.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/ICombiner.java b/jstorm-client/src/main/java/backtype/storm/metric/api/ICombiner.java
deleted file mode 100644
index cdc9363..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/ICombiner.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.metric.api;
-
-public interface ICombiner<T> {
- public T identity();
-
- public T combine(T a, T b);
-}
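
A minimal sketch of an ICombiner and how it pairs with the CombinedMetric shown above; MaxCombiner is an illustrative name, not a class in this codebase:

import backtype.storm.metric.api.CombinedMetric;
import backtype.storm.metric.api.ICombiner;

public class MaxCombiner implements ICombiner<Long> {
    public Long identity() {
        // Neutral element: combining any value with it returns the other value.
        return Long.MIN_VALUE;
    }

    public Long combine(Long a, Long b) {
        return Math.max(a, b);
    }

    public static void main(String[] args) {
        CombinedMetric maxLatency = new CombinedMetric(new MaxCombiner());
        maxLatency.update(12L);
        maxLatency.update(30L);
        System.out.println(maxLatency.getValueAndReset()); // 30; the value then resets to identity()
    }
}
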
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/IMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/IMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/IMetric.java
deleted file mode 100644
index cd50757..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/IMetric.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package backtype.storm.metric.api;
-
-public interface IMetric {
- public Object getValueAndReset();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/IMetricsConsumer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/IMetricsConsumer.java b/jstorm-client/src/main/java/backtype/storm/metric/api/IMetricsConsumer.java
deleted file mode 100644
index 51b8d5b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/IMetricsConsumer.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package backtype.storm.metric.api;
-
-import backtype.storm.task.IErrorReporter;
-import backtype.storm.task.TopologyContext;
-import java.util.Collection;
-import java.util.Map;
-
-public interface IMetricsConsumer {
- public static class TaskInfo {
- public TaskInfo() {
- }
-
- public TaskInfo(String srcWorkerHost, int srcWorkerPort,
- String srcComponentId, int srcTaskId, long timestamp,
- int updateIntervalSecs) {
- this.srcWorkerHost = srcWorkerHost;
- this.srcWorkerPort = srcWorkerPort;
- this.srcComponentId = srcComponentId;
- this.srcTaskId = srcTaskId;
- this.timestamp = timestamp;
- this.updateIntervalSecs = updateIntervalSecs;
- }
-
- public String srcWorkerHost;
- public int srcWorkerPort;
- public String srcComponentId;
- public int srcTaskId;
- public long timestamp;
- public int updateIntervalSecs;
- }
-
- public static class DataPoint {
- public DataPoint() {
- }
-
- public DataPoint(String name, Object value) {
- this.name = name;
- this.value = value;
- }
-
- @Override
- public String toString() {
- return "[" + name + " = " + value + "]";
- }
-
- public String name;
- public Object value;
- }
-
- void prepare(Map stormConf, Object registrationArgument,
- TopologyContext context, IErrorReporter errorReporter);
-
- void handleDataPoints(TaskInfo taskInfo, Collection<DataPoint> dataPoints);
-
- void cleanup();
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/IReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/IReducer.java b/jstorm-client/src/main/java/backtype/storm/metric/api/IReducer.java
deleted file mode 100644
index fe221ae..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/IReducer.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.metric.api;
-
-public interface IReducer<T> {
- T init();
-
- T reduce(T accumulator, Object input);
-
- Object extractResult(T accumulator);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/IStatefulObject.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/IStatefulObject.java b/jstorm-client/src/main/java/backtype/storm/metric/api/IStatefulObject.java
deleted file mode 100644
index ab37b2c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/IStatefulObject.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package backtype.storm.metric.api;
-
-public interface IStatefulObject {
- Object getState();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/MeanReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/MeanReducer.java b/jstorm-client/src/main/java/backtype/storm/metric/api/MeanReducer.java
deleted file mode 100644
index 86f4593..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/MeanReducer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package backtype.storm.metric.api;
-
-import backtype.storm.metric.api.IReducer;
-
-class MeanReducerState {
- public int count = 0;
- public double sum = 0.0;
-}
-
-public class MeanReducer implements IReducer<MeanReducerState> {
- public MeanReducerState init() {
- return new MeanReducerState();
- }
-
- public MeanReducerState reduce(MeanReducerState acc, Object input) {
- acc.count++;
- if (input instanceof Double) {
- acc.sum += (Double) input;
- } else if (input instanceof Long) {
- acc.sum += ((Long) input).doubleValue();
- } else if (input instanceof Integer) {
- acc.sum += ((Integer) input).doubleValue();
- } else {
- throw new RuntimeException(
- "MeanReducer::reduce called with unsupported input type `"
- + input.getClass()
- + "`. Supported types are Double, Long, Integer.");
- }
- return acc;
- }
-
- public Object extractResult(MeanReducerState acc) {
- if (acc.count > 0) {
- return new Double(acc.sum / (double) acc.count);
- } else {
- return null;
- }
- }
-}
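
A quick sketch of how MeanReducer plugs into the ReducedMetric wrapper defined later in this diff; the values are illustrative:

import backtype.storm.metric.api.MeanReducer;
import backtype.storm.metric.api.ReducedMetric;

public class MeanReducerExample {
    public static void main(String[] args) {
        ReducedMetric avgLatency = new ReducedMetric(new MeanReducer());
        avgLatency.update(10L);
        avgLatency.update(20);   // Integer and Long inputs are both accepted,
        avgLatency.update(30.0); // as is Double
        System.out.println(avgLatency.getValueAndReset()); // 20.0; the accumulator is then re-initialized
    }
}
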
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/MultiCountMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/MultiCountMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/MultiCountMetric.java
deleted file mode 100644
index f550eeb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/MultiCountMetric.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package backtype.storm.metric.api;
-
-import backtype.storm.metric.api.IMetric;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MultiCountMetric implements IMetric {
- Map<String, CountMetric> _value = new HashMap();
-
- public MultiCountMetric() {
- }
-
- public CountMetric scope(String key) {
- CountMetric val = _value.get(key);
- if (val == null) {
- _value.put(key, val = new CountMetric());
- }
- return val;
- }
-
- public Object getValueAndReset() {
- Map ret = new HashMap();
- for (Map.Entry<String, CountMetric> e : _value.entrySet()) {
- ret.put(e.getKey(), e.getValue().getValueAndReset());
- }
- return ret;
- }
-}
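
A brief sketch of the scope() idiom above, which keeps one CountMetric per key and flattens them into a map on each reset; the key names are made up:

import java.util.Map;

import backtype.storm.metric.api.MultiCountMetric;

public class MultiCountExample {
    public static void main(String[] args) {
        MultiCountMetric acked = new MultiCountMetric();
        acked.scope("default").incr();
        acked.scope("errors").incrBy(3);
        Map counts = (Map) acked.getValueAndReset();
        System.out.println(counts); // {default=1, errors=3} (order may vary); each counter resets to 0
    }
}
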
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/MultiReducedMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/MultiReducedMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/MultiReducedMetric.java
deleted file mode 100644
index 5020fd8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/MultiReducedMetric.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package backtype.storm.metric.api;
-
-import backtype.storm.metric.api.IMetric;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MultiReducedMetric implements IMetric {
- Map<String, ReducedMetric> _value = new HashMap();
- IReducer _reducer;
-
- public MultiReducedMetric(IReducer reducer) {
- _reducer = reducer;
- }
-
- public ReducedMetric scope(String key) {
- ReducedMetric val = _value.get(key);
- if (val == null) {
- _value.put(key, val = new ReducedMetric(_reducer));
- }
- return val;
- }
-
- public Object getValueAndReset() {
- Map ret = new HashMap();
- for (Map.Entry<String, ReducedMetric> e : _value.entrySet()) {
- Object val = e.getValue().getValueAndReset();
- if (val != null) {
- ret.put(e.getKey(), val);
- }
- }
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/ReducedMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/ReducedMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/ReducedMetric.java
deleted file mode 100644
index b2a7bf8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/ReducedMetric.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package backtype.storm.metric.api;
-
-public class ReducedMetric implements IMetric {
- private final IReducer _reducer;
- private Object _accumulator;
-
- public ReducedMetric(IReducer reducer) {
- _reducer = reducer;
- _accumulator = _reducer.init();
- }
-
- public void update(Object value) {
- _accumulator = _reducer.reduce(_accumulator, value);
- }
-
- public Object getValueAndReset() {
- Object ret = _reducer.extractResult(_accumulator);
- _accumulator = _reducer.init();
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/StateMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/StateMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/StateMetric.java
deleted file mode 100644
index 48170ff..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/StateMetric.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.metric.api;
-
-public class StateMetric implements IMetric {
- private IStatefulObject _obj;
-
- public StateMetric(IStatefulObject obj) {
- _obj = obj;
- }
-
- @Override
- public Object getValueAndReset() {
- return _obj.getState();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/AssignableShellMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/AssignableShellMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/AssignableShellMetric.java
deleted file mode 100644
index 20387ed..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/AssignableShellMetric.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.metric.api.rpc;
-
-import backtype.storm.metric.api.AssignableMetric;
-
-public class AssignableShellMetric extends AssignableMetric implements IShellMetric {
- public AssignableShellMetric(Object value) {
- super(value);
- }
-
- public void updateMetricFromRPC(Object value) {
- setValue(value);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CombinedShellMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CombinedShellMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CombinedShellMetric.java
deleted file mode 100644
index 231c571..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CombinedShellMetric.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.metric.api.rpc;
-
-import backtype.storm.metric.api.CombinedMetric;
-import backtype.storm.metric.api.ICombiner;
-
-public class CombinedShellMetric extends CombinedMetric implements IShellMetric {
- public CombinedShellMetric(ICombiner combiner) {
- super(combiner);
- }
-
- public void updateMetricFromRPC(Object value) {
- update(value);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CountShellMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CountShellMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CountShellMetric.java
deleted file mode 100644
index def74c2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/CountShellMetric.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.metric.api.rpc;
-
-import backtype.storm.metric.api.CountMetric;
-
-public class CountShellMetric extends CountMetric implements IShellMetric {
- /***
- * @param value should be null or a Long:
- *        if value is null, incr() is called;
- *        if value is a Long, incrBy((Long) value) is called.
- * */
- public void updateMetricFromRPC(Object value) {
- if (value == null) {
- incr();
- } else if (value instanceof Long) {
- incrBy((Long)value);
- } else {
- throw new RuntimeException("CountShellMetric updateMetricFromRPC params should be null or Long");
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/IShellMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/IShellMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/IShellMetric.java
deleted file mode 100644
index d53baea..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/IShellMetric.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.metric.api.rpc;
-
-import backtype.storm.metric.api.IMetric;
-
-public interface IShellMetric extends IMetric {
- /***
- * This method is called by ShellBolt and ShellSpout through an RPC call to update the metric.
- * @param value the value used to update the metric; its meaning depends on the implementation.
- *        It can be any JSON-supported type: String, Long, Double, Boolean, Null, List, Map.
- * */
- public void updateMetricFromRPC(Object value);
-}
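
To illustrate the contract above, a small sketch using the CountShellMetric from this diff; the values mimic what a ShellBolt would pass along from a subprocess RPC call, and the class name is illustrative:

import backtype.storm.metric.api.rpc.CountShellMetric;
import backtype.storm.metric.api.rpc.IShellMetric;

public class ShellMetricExample {
    public static void main(String[] args) {
        CountShellMetric counter = new CountShellMetric();
        IShellMetric asShellMetric = counter;
        asShellMetric.updateMetricFromRPC(null); // null -> incr(), count becomes 1
        asShellMetric.updateMetricFromRPC(5L);   // Long -> incrBy(5), count becomes 6
        System.out.println(counter.getValueAndReset()); // 6; the counter then resets to 0
    }
}
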
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/ReducedShellMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/ReducedShellMetric.java b/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/ReducedShellMetric.java
deleted file mode 100644
index 097ed51..0000000
--- a/jstorm-client/src/main/java/backtype/storm/metric/api/rpc/ReducedShellMetric.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.metric.api.rpc;
-
-import backtype.storm.metric.api.IReducer;
-import backtype.storm.metric.api.ReducedMetric;
-
-public class ReducedShellMetric extends ReducedMetric implements IShellMetric {
-
- public ReducedShellMetric(IReducer reducer) {
- super(reducer);
- }
-
- public void updateMetricFromRPC(Object value) {
- update(value);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/multilang/BoltMsg.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/multilang/BoltMsg.java b/jstorm-client/src/main/java/backtype/storm/multilang/BoltMsg.java
deleted file mode 100644
index 446bdc4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/multilang/BoltMsg.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.multilang;
-
-import java.util.List;
-
-/**
- * BoltMsg is an object that represents the data sent from a shell component to
- * a bolt process that implements a multi-language protocol. It is the union of
- * all data types that a bolt can receive from Storm.
- *
- * <p>
- * BoltMsgs are objects sent to the ISerializer interface, for serialization
- * according to the wire protocol implemented by the serializer. The BoltMsg
- * class allows for a decoupling between the serialized representation of the
- * data and the data itself.
- * </p>
- */
-public class BoltMsg {
- private String id;
- private String comp;
- private String stream;
- private long task;
- private List<Object> tuple;
-
- public String getId() {
- return id;
- }
-
- public void setId(String id) {
- this.id = id;
- }
-
- public String getComp() {
- return comp;
- }
-
- public void setComp(String comp) {
- this.comp = comp;
- }
-
- public String getStream() {
- return stream;
- }
-
- public void setStream(String stream) {
- this.stream = stream;
- }
-
- public long getTask() {
- return task;
- }
-
- public void setTask(long task) {
- this.task = task;
- }
-
- public List<Object> getTuple() {
- return tuple;
- }
-
- public void setTuple(List<Object> tuple) {
- this.tuple = tuple;
- }
-}
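
For context, a small sketch of populating a BoltMsg before handing it to an ISerializer; all field values below are made up:

import java.util.Arrays;

import backtype.storm.multilang.BoltMsg;

public class BoltMsgExample {
    public static BoltMsg sampleMsg() {
        BoltMsg msg = new BoltMsg();
        msg.setId("tuple-1");      // tuple id used for acking
        msg.setComp("word-spout"); // source component
        msg.setStream("default");  // source stream
        msg.setTask(3);            // source task id
        msg.setTuple(Arrays.<Object>asList("hello", 1));
        return msg;
    }
}
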
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/multilang/ISerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/multilang/ISerializer.java b/jstorm-client/src/main/java/backtype/storm/multilang/ISerializer.java
deleted file mode 100644
index c9c7ad4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/multilang/ISerializer.java
+++ /dev/null
@@ -1,82 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.multilang;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.Serializable;
-import java.util.List;
-import java.util.Map;
-
-import backtype.storm.task.TopologyContext;
-
-/**
- * The ISerializer interface describes the methods that an object should
- * implement to provide serialization and de-serialization capabilities to
- * non-JVM language components.
- */
-public interface ISerializer extends Serializable {
-
- /**
- * This method sets the input and output streams of the serializer
- *
- * @param processIn output stream to non-JVM component
- * @param processOut input stream from non-JVM component
- */
- void initialize(OutputStream processIn, InputStream processOut);
-
- /**
- * This method transmits the Storm config to the non-JVM process and
- * receives its pid.
- *
- * @param conf storm configuration
- * @param context topology context
- * @return process pid
- */
- Number connect(Map conf, TopologyContext context) throws IOException,
- NoOutputException;
-
- /**
- * This method receives a shell message from the non-JVM process
- *
- * @return shell message
- */
- ShellMsg readShellMsg() throws IOException, NoOutputException;
-
- /**
- * This method sends a bolt message to a non-JVM bolt process
- *
- * @param msg bolt message
- */
- void writeBoltMsg(BoltMsg msg) throws IOException;
-
- /**
- * This method sends a spout message to a non-JVM spout process
- *
- * @param msg spout message
- */
- void writeSpoutMsg(SpoutMsg msg) throws IOException;
-
- /**
- * This method sends a list of task IDs to a non-JVM bolt process
- *
- * @param taskIds list of task IDs
- */
- void writeTaskIds(List<Integer> taskIds) throws IOException;
-}
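
A hedged sketch of the call order a shell component driver would follow with this interface; the Process, conf, context and the method name roundTrip are assumptions supplied for illustration, while JsonSerializer is the implementation that follows in this diff:

import java.io.IOException;
import java.util.Map;

import backtype.storm.multilang.BoltMsg;
import backtype.storm.multilang.ISerializer;
import backtype.storm.multilang.JsonSerializer;
import backtype.storm.multilang.NoOutputException;
import backtype.storm.multilang.ShellMsg;
import backtype.storm.task.TopologyContext;

public class MultilangHandshakeSketch {
    public static ShellMsg roundTrip(Process subprocess, Map conf, TopologyContext context,
                                     BoltMsg boltMsg) throws IOException, NoOutputException {
        ISerializer serializer = new JsonSerializer();
        // Wire the serializer to the subprocess: we write to its stdin and read from its stdout.
        serializer.initialize(subprocess.getOutputStream(), subprocess.getInputStream());
        Number pid = serializer.connect(conf, context); // send setup info, receive the subprocess pid
        serializer.writeBoltMsg(boltMsg);               // deliver one tuple
        return serializer.readShellMsg();               // read back an emit/ack/fail/log command
    }
}
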
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/multilang/JsonSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/multilang/JsonSerializer.java b/jstorm-client/src/main/java/backtype/storm/multilang/JsonSerializer.java
deleted file mode 100644
index 9fca312..0000000
--- a/jstorm-client/src/main/java/backtype/storm/multilang/JsonSerializer.java
+++ /dev/null
@@ -1,202 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.multilang;
-
-import java.io.BufferedReader;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.OutputStream;
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-
-import org.json.simple.JSONObject;
-import org.json.simple.JSONValue;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-
-/**
- * JsonSerializer implements the JSON multilang protocol.
- */
-public class JsonSerializer implements ISerializer {
- private DataOutputStream processIn;
- private BufferedReader processOut;
-
- public void initialize(OutputStream processIn, InputStream processOut) {
- this.processIn = new DataOutputStream(processIn);
- try {
- this.processOut = new BufferedReader(new InputStreamReader(processOut, "UTF-8"));
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
- public Number connect(Map conf, TopologyContext context)
- throws IOException, NoOutputException {
- JSONObject setupInfo = new JSONObject();
- setupInfo.put("pidDir", context.getPIDDir());
- setupInfo.put("conf", conf);
- setupInfo.put("context", context);
- writeMessage(setupInfo);
-
- Number pid = (Number) ((JSONObject) readMessage()).get("pid");
- return pid;
- }
-
- public void writeBoltMsg(BoltMsg boltMsg) throws IOException {
- JSONObject obj = new JSONObject();
- obj.put("id", boltMsg.getId());
- obj.put("comp", boltMsg.getComp());
- obj.put("stream", boltMsg.getStream());
- obj.put("task", boltMsg.getTask());
- obj.put("tuple", boltMsg.getTuple());
- writeMessage(obj);
- }
-
- public void writeSpoutMsg(SpoutMsg msg) throws IOException {
- JSONObject obj = new JSONObject();
- obj.put("command", msg.getCommand());
- obj.put("id", msg.getId());
- writeMessage(obj);
- }
-
- public void writeTaskIds(List<Integer> taskIds) throws IOException {
- writeMessage(taskIds);
- }
-
- private void writeMessage(Object msg) throws IOException {
- writeString(JSONValue.toJSONString(msg));
- }
-
- private void writeString(String str) throws IOException {
- byte[] strBytes = str.getBytes("UTF-8");
- processIn.write(strBytes, 0, strBytes.length);
- processIn.writeBytes("\nend\n");
- processIn.flush();
- }
-
- public ShellMsg readShellMsg() throws IOException, NoOutputException {
- JSONObject msg = (JSONObject) readMessage();
- ShellMsg shellMsg = new ShellMsg();
-
- String command = (String) msg.get("command");
- shellMsg.setCommand(command);
-
- Object id = msg.get("id");
- shellMsg.setId(id);
-
- String log = (String) msg.get("msg");
- shellMsg.setMsg(log);
-
- String stream = (String) msg.get("stream");
- if (stream == null)
- stream = Utils.DEFAULT_STREAM_ID;
- shellMsg.setStream(stream);
-
- Object taskObj = msg.get("task");
- if (taskObj != null) {
- shellMsg.setTask((Long) taskObj);
- } else {
- shellMsg.setTask(0);
- }
-
- Object need_task_ids = msg.get("need_task_ids");
- if (need_task_ids == null || ((Boolean) need_task_ids).booleanValue()) {
- shellMsg.setNeedTaskIds(true);
- } else {
- shellMsg.setNeedTaskIds(false);
- }
-
- shellMsg.setTuple((List) msg.get("tuple"));
-
- //List<Tuple> anchors = new ArrayList<Tuple>();
- Object anchorObj = msg.get("anchors");
- if (anchorObj != null) {
- if (anchorObj instanceof String) {
- anchorObj = Arrays.asList(anchorObj);
- }
- for (Object o : (List) anchorObj) {
- shellMsg.addAnchor((String) o);
- }
- }
-
- Object nameObj = msg.get("name");
- String metricName = null;
- if (nameObj != null && nameObj instanceof String) {
- metricName = (String) nameObj;
- }
- shellMsg.setMetricName(metricName);
-
- Object paramsObj = msg.get("params");
- shellMsg.setMetricParams(paramsObj);
-
- if (command.equals("log")) {
- Object logLevelObj = msg.get("level");
- if (logLevelObj != null && logLevelObj instanceof Long) {
- long logLevel = (Long)logLevelObj;
- shellMsg.setLogLevel((int)logLevel);
- }
- }
-
- return shellMsg;
- }
-
- private Object readMessage() throws IOException, NoOutputException {
- String string = readString();
- Object msg = JSONValue.parse(string);
- if (msg != null) {
- return msg;
- } else {
- throw new IOException("unable to parse: " + string);
- }
- }
-
- private String readString() throws IOException, NoOutputException {
- StringBuilder line = new StringBuilder();
-
- while (true) {
- String subline = processOut.readLine();
- if (subline == null) {
- StringBuilder errorMessage = new StringBuilder();
- errorMessage.append("Pipe to subprocess seems to be broken!");
- if (line.length() == 0) {
- errorMessage.append(" No output read.\n");
- } else {
- errorMessage.append(" Currently read output: "
- + line.toString() + "\n");
- }
- errorMessage.append("Serializer Exception:\n");
- throw new NoOutputException(errorMessage.toString());
- }
- if (subline.equals("end")) {
- break;
- }
- if (line.length() != 0) {
- line.append("\n");
- }
- line.append(subline);
- }
- return line.toString();
- }
-}
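
To make the framing in writeString()/readString() concrete, a short sketch of one message as it appears on the wire: a JSON document followed by a line containing only "end". The command and id values are illustrative:

import org.json.simple.JSONObject;
import org.json.simple.JSONValue;

public class MultilangFramingExample {
    public static String frame() {
        JSONObject msg = new JSONObject();
        msg.put("command", "ack");
        msg.put("id", "tuple-1");
        // JsonSerializer terminates every message with a line containing only "end".
        return JSONValue.toJSONString(msg) + "\nend\n";
    }
}
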
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/multilang/NoOutputException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/multilang/NoOutputException.java b/jstorm-client/src/main/java/backtype/storm/multilang/NoOutputException.java
deleted file mode 100644
index 1ce75d3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/multilang/NoOutputException.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.multilang;
-
-/**
- * A NoOutputException states that no data has been received from the connected
- * non-JVM process.
- */
-public class NoOutputException extends Exception {
- public NoOutputException() {
- super();
- }
-
- public NoOutputException(String message) {
- super(message);
- }
-
- public NoOutputException(String message, Throwable cause) {
- super(message, cause);
- }
-
- public NoOutputException(Throwable cause) {
- super(cause);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/multilang/ShellMsg.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/multilang/ShellMsg.java b/jstorm-client/src/main/java/backtype/storm/multilang/ShellMsg.java
deleted file mode 100644
index 9eafb1a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/multilang/ShellMsg.java
+++ /dev/null
@@ -1,185 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.multilang;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * ShellMsg is an object that represents the data sent to a shell component from
- * a process that implements a multi-language protocol. It is the union of all
- * data types that a component can send to Storm.
- *
- * <p>
- * ShellMsgs are objects received from the ISerializer interface, after the
- * serializer has deserialized the data from the underlying wire protocol. The
- * ShellMsg class allows for a decoupling between the serialized representation
- * of the data and the data itself.
- * </p>
- */
-public class ShellMsg {
- private String command;
- private Object id;
- private List<String> anchors;
- private String stream;
- private long task;
- private String msg;
- private List<Object> tuple;
- private boolean needTaskIds;
-
- //metrics rpc
- private String metricName;
- private Object metricParams;
-
- //logLevel
- public enum ShellLogLevel {
- TRACE, DEBUG, INFO, WARN, ERROR;
-
- public static ShellLogLevel fromInt(int i) {
- switch (i) {
- case 0: return TRACE;
- case 1: return DEBUG;
- case 2: return INFO;
- case 3: return WARN;
- case 4: return ERROR;
- default: return INFO;
- }
- }
- }
-
- private ShellLogLevel logLevel = ShellLogLevel.INFO;
-
- public String getCommand() {
- return command;
- }
-
- public void setCommand(String command) {
- this.command = command;
- }
-
- public Object getId() {
- return id;
- }
-
- public void setId(Object id) {
- this.id = id;
- }
-
- public List<String> getAnchors() {
- return anchors;
- }
-
- public void setAnchors(List<String> anchors) {
- this.anchors = anchors;
- }
-
- public void addAnchor(String anchor) {
- if (anchors == null) {
- anchors = new ArrayList<String>();
- }
- this.anchors.add(anchor);
- }
-
- public String getStream() {
- return stream;
- }
-
- public void setStream(String stream) {
- this.stream = stream;
- }
-
- public long getTask() {
- return task;
- }
-
- public void setTask(long task) {
- this.task = task;
- }
-
- public String getMsg() {
- return msg;
- }
-
- public void setMsg(String msg) {
- this.msg = msg;
- }
-
- public List<Object> getTuple() {
- return tuple;
- }
-
- public void setTuple(List<Object> tuple) {
- this.tuple = tuple;
- }
-
- public void addTuple(Object tuple) {
- if (this.tuple == null) {
- this.tuple = new ArrayList<Object>();
- }
- this.tuple.add(tuple);
- }
-
- public boolean areTaskIdsNeeded() {
- return needTaskIds;
- }
-
- public void setNeedTaskIds(boolean needTaskIds) {
- this.needTaskIds = needTaskIds;
- }
-
- public void setMetricName(String metricName) {
- this.metricName = metricName;
- }
-
- public String getMetricName() {
- return this.metricName;
- }
-
- public void setMetricParams(Object metricParams) {
- this.metricParams = metricParams;
- }
-
- public Object getMetricParams() {
- return metricParams;
- }
-
- public ShellLogLevel getLogLevel() {
- return logLevel;
- }
-
- public void setLogLevel(int logLevel) {
- this.logLevel = ShellLogLevel.fromInt(logLevel);
- }
-
- @Override
- public String toString() {
- return "ShellMsg{" +
- "command='" + command + '\'' +
- ", id=" + id +
- ", anchors=" + anchors +
- ", stream='" + stream + '\'' +
- ", task=" + task +
- ", msg='" + msg + '\'' +
- ", tuple=" + tuple +
- ", needTaskIds=" + needTaskIds +
- ", metricName='" + metricName + '\'' +
- ", metricParams=" + metricParams +
- ", logLevel=" + logLevel +
- '}';
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ComponentCommon.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ComponentCommon.java b/jstorm-client/src/main/java/backtype/storm/generated/ComponentCommon.java
deleted file mode 100644
index 273f8be..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ComponentCommon.java
+++ /dev/null
@@ -1,707 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ComponentCommon implements org.apache.thrift7.TBase<ComponentCommon, ComponentCommon._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ComponentCommon");
-
- private static final org.apache.thrift7.protocol.TField INPUTS_FIELD_DESC = new org.apache.thrift7.protocol.TField("inputs", org.apache.thrift7.protocol.TType.MAP, (short)1);
- private static final org.apache.thrift7.protocol.TField STREAMS_FIELD_DESC = new org.apache.thrift7.protocol.TField("streams", org.apache.thrift7.protocol.TType.MAP, (short)2);
- private static final org.apache.thrift7.protocol.TField PARALLELISM_HINT_FIELD_DESC = new org.apache.thrift7.protocol.TField("parallelism_hint", org.apache.thrift7.protocol.TType.I32, (short)3);
- private static final org.apache.thrift7.protocol.TField JSON_CONF_FIELD_DESC = new org.apache.thrift7.protocol.TField("json_conf", org.apache.thrift7.protocol.TType.STRING, (short)4);
-
- private Map<GlobalStreamId,Grouping> inputs; // required
- private Map<String,StreamInfo> streams; // required
- private int parallelism_hint; // required
- private String json_conf; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- INPUTS((short)1, "inputs"),
- STREAMS((short)2, "streams"),
- PARALLELISM_HINT((short)3, "parallelism_hint"),
- JSON_CONF((short)4, "json_conf");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // INPUTS
- return INPUTS;
- case 2: // STREAMS
- return STREAMS;
- case 3: // PARALLELISM_HINT
- return PARALLELISM_HINT;
- case 4: // JSON_CONF
- return JSON_CONF;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __PARALLELISM_HINT_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.INPUTS, new org.apache.thrift7.meta_data.FieldMetaData("inputs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, Grouping.class))));
- tmpMap.put(_Fields.STREAMS, new org.apache.thrift7.meta_data.FieldMetaData("streams", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, StreamInfo.class))));
- tmpMap.put(_Fields.PARALLELISM_HINT, new org.apache.thrift7.meta_data.FieldMetaData("parallelism_hint", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.JSON_CONF, new org.apache.thrift7.meta_data.FieldMetaData("json_conf", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ComponentCommon.class, metaDataMap);
- }
-
- public ComponentCommon() {
- }
-
- public ComponentCommon(
- Map<GlobalStreamId,Grouping> inputs,
- Map<String,StreamInfo> streams)
- {
- this();
- this.inputs = inputs;
- this.streams = streams;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public ComponentCommon(ComponentCommon other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_inputs()) {
- Map<GlobalStreamId,Grouping> __this__inputs = new HashMap<GlobalStreamId,Grouping>();
- for (Map.Entry<GlobalStreamId, Grouping> other_element : other.inputs.entrySet()) {
-
- GlobalStreamId other_element_key = other_element.getKey();
- Grouping other_element_value = other_element.getValue();
-
- GlobalStreamId __this__inputs_copy_key = new GlobalStreamId(other_element_key);
-
- Grouping __this__inputs_copy_value = new Grouping(other_element_value);
-
- __this__inputs.put(__this__inputs_copy_key, __this__inputs_copy_value);
- }
- this.inputs = __this__inputs;
- }
- if (other.is_set_streams()) {
- Map<String,StreamInfo> __this__streams = new HashMap<String,StreamInfo>();
- for (Map.Entry<String, StreamInfo> other_element : other.streams.entrySet()) {
-
- String other_element_key = other_element.getKey();
- StreamInfo other_element_value = other_element.getValue();
-
- String __this__streams_copy_key = other_element_key;
-
- StreamInfo __this__streams_copy_value = new StreamInfo(other_element_value);
-
- __this__streams.put(__this__streams_copy_key, __this__streams_copy_value);
- }
- this.streams = __this__streams;
- }
- this.parallelism_hint = other.parallelism_hint;
- if (other.is_set_json_conf()) {
- this.json_conf = other.json_conf;
- }
- }
-
- public ComponentCommon deepCopy() {
- return new ComponentCommon(this);
- }
-
- @Override
- public void clear() {
- this.inputs = null;
- this.streams = null;
- set_parallelism_hint_isSet(false);
- this.parallelism_hint = 0;
- this.json_conf = null;
- }
-
- public int get_inputs_size() {
- return (this.inputs == null) ? 0 : this.inputs.size();
- }
-
- public void put_to_inputs(GlobalStreamId key, Grouping val) {
- if (this.inputs == null) {
- this.inputs = new HashMap<GlobalStreamId,Grouping>();
- }
- this.inputs.put(key, val);
- }
-
- public Map<GlobalStreamId,Grouping> get_inputs() {
- return this.inputs;
- }
-
- public void set_inputs(Map<GlobalStreamId,Grouping> inputs) {
- this.inputs = inputs;
- }
-
- public void unset_inputs() {
- this.inputs = null;
- }
-
- /** Returns true if field inputs is set (has been assigned a value) and false otherwise */
- public boolean is_set_inputs() {
- return this.inputs != null;
- }
-
- public void set_inputs_isSet(boolean value) {
- if (!value) {
- this.inputs = null;
- }
- }
-
- public int get_streams_size() {
- return (this.streams == null) ? 0 : this.streams.size();
- }
-
- public void put_to_streams(String key, StreamInfo val) {
- if (this.streams == null) {
- this.streams = new HashMap<String,StreamInfo>();
- }
- this.streams.put(key, val);
- }
-
- public Map<String,StreamInfo> get_streams() {
- return this.streams;
- }
-
- public void set_streams(Map<String,StreamInfo> streams) {
- this.streams = streams;
- }
-
- public void unset_streams() {
- this.streams = null;
- }
-
- /** Returns true if field streams is set (has been assigned a value) and false otherwise */
- public boolean is_set_streams() {
- return this.streams != null;
- }
-
- public void set_streams_isSet(boolean value) {
- if (!value) {
- this.streams = null;
- }
- }
-
- public int get_parallelism_hint() {
- return this.parallelism_hint;
- }
-
- public void set_parallelism_hint(int parallelism_hint) {
- this.parallelism_hint = parallelism_hint;
- set_parallelism_hint_isSet(true);
- }
-
- public void unset_parallelism_hint() {
- __isset_bit_vector.clear(__PARALLELISM_HINT_ISSET_ID);
- }
-
- /** Returns true if field parallelism_hint is set (has been assigned a value) and false otherwise */
- public boolean is_set_parallelism_hint() {
- return __isset_bit_vector.get(__PARALLELISM_HINT_ISSET_ID);
- }
-
- public void set_parallelism_hint_isSet(boolean value) {
- __isset_bit_vector.set(__PARALLELISM_HINT_ISSET_ID, value);
- }
-
- public String get_json_conf() {
- return this.json_conf;
- }
-
- public void set_json_conf(String json_conf) {
- this.json_conf = json_conf;
- }
-
- public void unset_json_conf() {
- this.json_conf = null;
- }
-
- /** Returns true if field json_conf is set (has been assigned a value) and false otherwise */
- public boolean is_set_json_conf() {
- return this.json_conf != null;
- }
-
- public void set_json_conf_isSet(boolean value) {
- if (!value) {
- this.json_conf = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case INPUTS:
- if (value == null) {
- unset_inputs();
- } else {
- set_inputs((Map<GlobalStreamId,Grouping>)value);
- }
- break;
-
- case STREAMS:
- if (value == null) {
- unset_streams();
- } else {
- set_streams((Map<String,StreamInfo>)value);
- }
- break;
-
- case PARALLELISM_HINT:
- if (value == null) {
- unset_parallelism_hint();
- } else {
- set_parallelism_hint((Integer)value);
- }
- break;
-
- case JSON_CONF:
- if (value == null) {
- unset_json_conf();
- } else {
- set_json_conf((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case INPUTS:
- return get_inputs();
-
- case STREAMS:
- return get_streams();
-
- case PARALLELISM_HINT:
- return Integer.valueOf(get_parallelism_hint());
-
- case JSON_CONF:
- return get_json_conf();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case INPUTS:
- return is_set_inputs();
- case STREAMS:
- return is_set_streams();
- case PARALLELISM_HINT:
- return is_set_parallelism_hint();
- case JSON_CONF:
- return is_set_json_conf();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof ComponentCommon)
- return this.equals((ComponentCommon)that);
- return false;
- }
-
- public boolean equals(ComponentCommon that) {
- if (that == null)
- return false;
-
- boolean this_present_inputs = true && this.is_set_inputs();
- boolean that_present_inputs = true && that.is_set_inputs();
- if (this_present_inputs || that_present_inputs) {
- if (!(this_present_inputs && that_present_inputs))
- return false;
- if (!this.inputs.equals(that.inputs))
- return false;
- }
-
- boolean this_present_streams = true && this.is_set_streams();
- boolean that_present_streams = true && that.is_set_streams();
- if (this_present_streams || that_present_streams) {
- if (!(this_present_streams && that_present_streams))
- return false;
- if (!this.streams.equals(that.streams))
- return false;
- }
-
- boolean this_present_parallelism_hint = true && this.is_set_parallelism_hint();
- boolean that_present_parallelism_hint = true && that.is_set_parallelism_hint();
- if (this_present_parallelism_hint || that_present_parallelism_hint) {
- if (!(this_present_parallelism_hint && that_present_parallelism_hint))
- return false;
- if (this.parallelism_hint != that.parallelism_hint)
- return false;
- }
-
- boolean this_present_json_conf = true && this.is_set_json_conf();
- boolean that_present_json_conf = true && that.is_set_json_conf();
- if (this_present_json_conf || that_present_json_conf) {
- if (!(this_present_json_conf && that_present_json_conf))
- return false;
- if (!this.json_conf.equals(that.json_conf))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_inputs = true && (is_set_inputs());
- builder.append(present_inputs);
- if (present_inputs)
- builder.append(inputs);
-
- boolean present_streams = true && (is_set_streams());
- builder.append(present_streams);
- if (present_streams)
- builder.append(streams);
-
- boolean present_parallelism_hint = true && (is_set_parallelism_hint());
- builder.append(present_parallelism_hint);
- if (present_parallelism_hint)
- builder.append(parallelism_hint);
-
- boolean present_json_conf = true && (is_set_json_conf());
- builder.append(present_json_conf);
- if (present_json_conf)
- builder.append(json_conf);
-
- return builder.toHashCode();
- }
-
- public int compareTo(ComponentCommon other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- ComponentCommon typedOther = (ComponentCommon)other;
-
- lastComparison = Boolean.valueOf(is_set_inputs()).compareTo(typedOther.is_set_inputs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_inputs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.inputs, typedOther.inputs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_streams()).compareTo(typedOther.is_set_streams());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_streams()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.streams, typedOther.streams);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_parallelism_hint()).compareTo(typedOther.is_set_parallelism_hint());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_parallelism_hint()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.parallelism_hint, typedOther.parallelism_hint);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_json_conf()).compareTo(typedOther.is_set_json_conf());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_json_conf()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.json_conf, typedOther.json_conf);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // INPUTS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map12 = iprot.readMapBegin();
- this.inputs = new HashMap<GlobalStreamId,Grouping>(2*_map12.size);
- for (int _i13 = 0; _i13 < _map12.size; ++_i13)
- {
- GlobalStreamId _key14; // required
- Grouping _val15; // required
- _key14 = new GlobalStreamId();
- _key14.read(iprot);
- _val15 = new Grouping();
- _val15.read(iprot);
- this.inputs.put(_key14, _val15);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // STREAMS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map16 = iprot.readMapBegin();
- this.streams = new HashMap<String,StreamInfo>(2*_map16.size);
- for (int _i17 = 0; _i17 < _map16.size; ++_i17)
- {
- String _key18; // required
- StreamInfo _val19; // required
- _key18 = iprot.readString();
- _val19 = new StreamInfo();
- _val19.read(iprot);
- this.streams.put(_key18, _val19);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // PARALLELISM_HINT
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.parallelism_hint = iprot.readI32();
- set_parallelism_hint_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // JSON_CONF
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.json_conf = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.inputs != null) {
- oprot.writeFieldBegin(INPUTS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.STRUCT, this.inputs.size()));
- for (Map.Entry<GlobalStreamId, Grouping> _iter20 : this.inputs.entrySet())
- {
- _iter20.getKey().write(oprot);
- _iter20.getValue().write(oprot);
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.streams != null) {
- oprot.writeFieldBegin(STREAMS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.STRUCT, this.streams.size()));
- for (Map.Entry<String, StreamInfo> _iter21 : this.streams.entrySet())
- {
- oprot.writeString(_iter21.getKey());
- _iter21.getValue().write(oprot);
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (is_set_parallelism_hint()) {
- oprot.writeFieldBegin(PARALLELISM_HINT_FIELD_DESC);
- oprot.writeI32(this.parallelism_hint);
- oprot.writeFieldEnd();
- }
- if (this.json_conf != null) {
- if (is_set_json_conf()) {
- oprot.writeFieldBegin(JSON_CONF_FIELD_DESC);
- oprot.writeString(this.json_conf);
- oprot.writeFieldEnd();
- }
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("ComponentCommon(");
- boolean first = true;
-
- sb.append("inputs:");
- if (this.inputs == null) {
- sb.append("null");
- } else {
- sb.append(this.inputs);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("streams:");
- if (this.streams == null) {
- sb.append("null");
- } else {
- sb.append(this.streams);
- }
- first = false;
- if (is_set_parallelism_hint()) {
- if (!first) sb.append(", ");
- sb.append("parallelism_hint:");
- sb.append(this.parallelism_hint);
- first = false;
- }
- if (is_set_json_conf()) {
- if (!first) sb.append(", ");
- sb.append("json_conf:");
- if (this.json_conf == null) {
- sb.append("null");
- } else {
- sb.append(this.json_conf);
- }
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_inputs()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'inputs' is unset! Struct:" + toString());
- }
-
- if (!is_set_streams()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'streams' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
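The deleted ComponentCommon above follows the usual generated-Thrift bean pattern: put_to_* creates the backing HashMap lazily, the optional i32 parallelism_hint is tracked through an isset bit, and validate() enforces the two required map fields. A minimal population sketch, assuming the StreamInfo, GlobalStreamId and Grouping helpers from the same generated package (the stream ids and values below are illustrative only, not taken from the diff):

    ComponentCommon common = new ComponentCommon();
    common.put_to_streams("default", new StreamInfo(Arrays.asList("word"), false));
    common.put_to_inputs(new GlobalStreamId("spout", "default"),
                         Grouping.shuffle(new NullStruct()));
    common.set_parallelism_hint(4);   // also flips the isset bit for the optional i32
    common.validate();                // passes: required fields 'inputs' and 'streams' are set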
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ComponentObject.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ComponentObject.java b/jstorm-client/src/main/java/backtype/storm/generated/ComponentObject.java
deleted file mode 100644
index 0509519..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ComponentObject.java
+++ /dev/null
@@ -1,385 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ComponentObject extends org.apache.thrift7.TUnion<ComponentObject, ComponentObject._Fields> {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ComponentObject");
- private static final org.apache.thrift7.protocol.TField SERIALIZED_JAVA_FIELD_DESC = new org.apache.thrift7.protocol.TField("serialized_java", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField SHELL_FIELD_DESC = new org.apache.thrift7.protocol.TField("shell", org.apache.thrift7.protocol.TType.STRUCT, (short)2);
- private static final org.apache.thrift7.protocol.TField JAVA_OBJECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("java_object", org.apache.thrift7.protocol.TType.STRUCT, (short)3);
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SERIALIZED_JAVA((short)1, "serialized_java"),
- SHELL((short)2, "shell"),
- JAVA_OBJECT((short)3, "java_object");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // SERIALIZED_JAVA
- return SERIALIZED_JAVA;
- case 2: // SHELL
- return SHELL;
- case 3: // JAVA_OBJECT
- return JAVA_OBJECT;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SERIALIZED_JAVA, new org.apache.thrift7.meta_data.FieldMetaData("serialized_java", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING , true)));
- tmpMap.put(_Fields.SHELL, new org.apache.thrift7.meta_data.FieldMetaData("shell", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ShellComponent.class)));
- tmpMap.put(_Fields.JAVA_OBJECT, new org.apache.thrift7.meta_data.FieldMetaData("java_object", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, JavaObject.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ComponentObject.class, metaDataMap);
- }
-
- public ComponentObject() {
- super();
- }
-
- public ComponentObject(_Fields setField, Object value) {
- super(setField, value);
- }
-
- public ComponentObject(ComponentObject other) {
- super(other);
- }
- public ComponentObject deepCopy() {
- return new ComponentObject(this);
- }
-
- public static ComponentObject serialized_java(ByteBuffer value) {
- ComponentObject x = new ComponentObject();
- x.set_serialized_java(value);
- return x;
- }
-
- public static ComponentObject serialized_java(byte[] value) {
- ComponentObject x = new ComponentObject();
- x.set_serialized_java(ByteBuffer.wrap(value));
- return x;
- }
-
- public static ComponentObject shell(ShellComponent value) {
- ComponentObject x = new ComponentObject();
- x.set_shell(value);
- return x;
- }
-
- public static ComponentObject java_object(JavaObject value) {
- ComponentObject x = new ComponentObject();
- x.set_java_object(value);
- return x;
- }
-
-
- @Override
- protected void checkType(_Fields setField, Object value) throws ClassCastException {
- switch (setField) {
- case SERIALIZED_JAVA:
- if (value instanceof ByteBuffer) {
- break;
- }
- throw new ClassCastException("Was expecting value of type ByteBuffer for field 'serialized_java', but got " + value.getClass().getSimpleName());
- case SHELL:
- if (value instanceof ShellComponent) {
- break;
- }
- throw new ClassCastException("Was expecting value of type ShellComponent for field 'shell', but got " + value.getClass().getSimpleName());
- case JAVA_OBJECT:
- if (value instanceof JavaObject) {
- break;
- }
- throw new ClassCastException("Was expecting value of type JavaObject for field 'java_object', but got " + value.getClass().getSimpleName());
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected Object readValue(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TField field) throws org.apache.thrift7.TException {
- _Fields setField = _Fields.findByThriftId(field.id);
- if (setField != null) {
- switch (setField) {
- case SERIALIZED_JAVA:
- if (field.type == SERIALIZED_JAVA_FIELD_DESC.type) {
- ByteBuffer serialized_java;
- serialized_java = iprot.readBinary();
- return serialized_java;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case SHELL:
- if (field.type == SHELL_FIELD_DESC.type) {
- ShellComponent shell;
- shell = new ShellComponent();
- shell.read(iprot);
- return shell;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case JAVA_OBJECT:
- if (field.type == JAVA_OBJECT_FIELD_DESC.type) {
- JavaObject java_object;
- java_object = new JavaObject();
- java_object.read(iprot);
- return java_object;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- default:
- throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- }
-
- @Override
- protected void writeValue(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- switch (setField_) {
- case SERIALIZED_JAVA:
- ByteBuffer serialized_java = (ByteBuffer)value_;
- oprot.writeBinary(serialized_java);
- return;
- case SHELL:
- ShellComponent shell = (ShellComponent)value_;
- shell.write(oprot);
- return;
- case JAVA_OBJECT:
- JavaObject java_object = (JavaObject)value_;
- java_object.write(oprot);
- return;
- default:
- throw new IllegalStateException("Cannot write union with unknown field " + setField_);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TField getFieldDesc(_Fields setField) {
- switch (setField) {
- case SERIALIZED_JAVA:
- return SERIALIZED_JAVA_FIELD_DESC;
- case SHELL:
- return SHELL_FIELD_DESC;
- case JAVA_OBJECT:
- return JAVA_OBJECT_FIELD_DESC;
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TStruct getStructDesc() {
- return STRUCT_DESC;
- }
-
- @Override
- protected _Fields enumForId(short id) {
- return _Fields.findByThriftIdOrThrow(id);
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
-
- public byte[] get_serialized_java() {
- set_serialized_java(org.apache.thrift7.TBaseHelper.rightSize(buffer_for_serialized_java()));
- ByteBuffer b = buffer_for_serialized_java();
- return b == null ? null : b.array();
- }
-
- public ByteBuffer buffer_for_serialized_java() {
- if (getSetField() == _Fields.SERIALIZED_JAVA) {
- return (ByteBuffer)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'serialized_java' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_serialized_java(byte[] value) {
- set_serialized_java(ByteBuffer.wrap(value));
- }
-
- public void set_serialized_java(ByteBuffer value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.SERIALIZED_JAVA;
- value_ = value;
- }
-
- public ShellComponent get_shell() {
- if (getSetField() == _Fields.SHELL) {
- return (ShellComponent)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'shell' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_shell(ShellComponent value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.SHELL;
- value_ = value;
- }
-
- public JavaObject get_java_object() {
- if (getSetField() == _Fields.JAVA_OBJECT) {
- return (JavaObject)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'java_object' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_java_object(JavaObject value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.JAVA_OBJECT;
- value_ = value;
- }
-
- public boolean is_set_serialized_java() {
- return setField_ == _Fields.SERIALIZED_JAVA;
- }
-
-
- public boolean is_set_shell() {
- return setField_ == _Fields.SHELL;
- }
-
-
- public boolean is_set_java_object() {
- return setField_ == _Fields.JAVA_OBJECT;
- }
-
-
- public boolean equals(Object other) {
- if (other instanceof ComponentObject) {
- return equals((ComponentObject)other);
- } else {
- return false;
- }
- }
-
- public boolean equals(ComponentObject other) {
- return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
- }
-
- @Override
- public int compareTo(ComponentObject other) {
- int lastComparison = org.apache.thrift7.TBaseHelper.compareTo(getSetField(), other.getSetField());
- if (lastComparison == 0) {
- return org.apache.thrift7.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
- }
- return lastComparison;
- }
-
-
- @Override
- public int hashCode() {
- HashCodeBuilder hcb = new HashCodeBuilder();
- hcb.append(this.getClass().getName());
- org.apache.thrift7.TFieldIdEnum setField = getSetField();
- if (setField != null) {
- hcb.append(setField.getThriftFieldId());
- Object value = getFieldValue();
- if (value instanceof org.apache.thrift7.TEnum) {
- hcb.append(((org.apache.thrift7.TEnum)getFieldValue()).getValue());
- } else {
- hcb.append(value);
- }
- }
- return hcb.toHashCode();
- }
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
-}
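ComponentObject is a generated TUnion: exactly one of serialized_java, shell or java_object is set at a time, the static factories pick the discriminator, and the typed getters throw when the union currently holds a different field. A rough sketch, with the ShellComponent arguments assumed purely for illustration:

    ComponentObject obj = ComponentObject.shell(
        new ShellComponent("python", "splitsentence.py"));
    obj.is_set_shell();       // true
    obj.get_shell();          // returns the ShellComponent
    obj.get_java_object();    // throws RuntimeException: union is currently set to 'shell'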
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/DRPCExecutionException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/DRPCExecutionException.java b/jstorm-client/src/main/java/backtype/storm/generated/DRPCExecutionException.java
deleted file mode 100644
index 4dbac48..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/DRPCExecutionException.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class DRPCExecutionException extends Exception implements org.apache.thrift7.TBase<DRPCExecutionException, DRPCExecutionException._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("DRPCExecutionException");
-
- private static final org.apache.thrift7.protocol.TField MSG_FIELD_DESC = new org.apache.thrift7.protocol.TField("msg", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String msg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- MSG((short)1, "msg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // MSG
- return MSG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.MSG, new org.apache.thrift7.meta_data.FieldMetaData("msg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(DRPCExecutionException.class, metaDataMap);
- }
-
- public DRPCExecutionException() {
- }
-
- public DRPCExecutionException(
- String msg)
- {
- this();
- this.msg = msg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public DRPCExecutionException(DRPCExecutionException other) {
- if (other.is_set_msg()) {
- this.msg = other.msg;
- }
- }
-
- public DRPCExecutionException deepCopy() {
- return new DRPCExecutionException(this);
- }
-
- @Override
- public void clear() {
- this.msg = null;
- }
-
- public String get_msg() {
- return this.msg;
- }
-
- public void set_msg(String msg) {
- this.msg = msg;
- }
-
- public void unset_msg() {
- this.msg = null;
- }
-
- /** Returns true if field msg is set (has been assigned a value) and false otherwise */
- public boolean is_set_msg() {
- return this.msg != null;
- }
-
- public void set_msg_isSet(boolean value) {
- if (!value) {
- this.msg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case MSG:
- if (value == null) {
- unset_msg();
- } else {
- set_msg((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case MSG:
- return get_msg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case MSG:
- return is_set_msg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof DRPCExecutionException)
- return this.equals((DRPCExecutionException)that);
- return false;
- }
-
- public boolean equals(DRPCExecutionException that) {
- if (that == null)
- return false;
-
- boolean this_present_msg = true && this.is_set_msg();
- boolean that_present_msg = true && that.is_set_msg();
- if (this_present_msg || that_present_msg) {
- if (!(this_present_msg && that_present_msg))
- return false;
- if (!this.msg.equals(that.msg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_msg = true && (is_set_msg());
- builder.append(present_msg);
- if (present_msg)
- builder.append(msg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(DRPCExecutionException other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- DRPCExecutionException typedOther = (DRPCExecutionException)other;
-
- lastComparison = Boolean.valueOf(is_set_msg()).compareTo(typedOther.is_set_msg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_msg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.msg, typedOther.msg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // MSG
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.msg = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.msg != null) {
- oprot.writeFieldBegin(MSG_FIELD_DESC);
- oprot.writeString(this.msg);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("DRPCExecutionException(");
- boolean first = true;
-
- sb.append("msg:");
- if (this.msg == null) {
- sb.append("null");
- } else {
- sb.append(this.msg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_msg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
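DRPCExecutionException carries a single required msg field, so validate() rejects an instance without it. On the caller side it typically surfaces from a DRPC execute call; a hedged sketch, where the client variable and the "reach" function are illustrative rather than taken from the diff:

    try {
        String result = client.execute("reach", "http://example.com");
    } catch (DRPCExecutionException e) {
        LOG.error("DRPC request failed: " + e.get_msg());
    }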
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/DRPCRequest.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/DRPCRequest.java b/jstorm-client/src/main/java/backtype/storm/generated/DRPCRequest.java
deleted file mode 100644
index ce8cfb5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/DRPCRequest.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class DRPCRequest implements org.apache.thrift7.TBase<DRPCRequest, DRPCRequest._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("DRPCRequest");
-
- private static final org.apache.thrift7.protocol.TField FUNC_ARGS_FIELD_DESC = new org.apache.thrift7.protocol.TField("func_args", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField REQUEST_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("request_id", org.apache.thrift7.protocol.TType.STRING, (short)2);
-
- private String func_args; // required
- private String request_id; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- FUNC_ARGS((short)1, "func_args"),
- REQUEST_ID((short)2, "request_id");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // FUNC_ARGS
- return FUNC_ARGS;
- case 2: // REQUEST_ID
- return REQUEST_ID;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.FUNC_ARGS, new org.apache.thrift7.meta_data.FieldMetaData("func_args", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.REQUEST_ID, new org.apache.thrift7.meta_data.FieldMetaData("request_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(DRPCRequest.class, metaDataMap);
- }
-
- public DRPCRequest() {
- }
-
- public DRPCRequest(
- String func_args,
- String request_id)
- {
- this();
- this.func_args = func_args;
- this.request_id = request_id;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public DRPCRequest(DRPCRequest other) {
- if (other.is_set_func_args()) {
- this.func_args = other.func_args;
- }
- if (other.is_set_request_id()) {
- this.request_id = other.request_id;
- }
- }
-
- public DRPCRequest deepCopy() {
- return new DRPCRequest(this);
- }
-
- @Override
- public void clear() {
- this.func_args = null;
- this.request_id = null;
- }
-
- public String get_func_args() {
- return this.func_args;
- }
-
- public void set_func_args(String func_args) {
- this.func_args = func_args;
- }
-
- public void unset_func_args() {
- this.func_args = null;
- }
-
- /** Returns true if field func_args is set (has been assigned a value) and false otherwise */
- public boolean is_set_func_args() {
- return this.func_args != null;
- }
-
- public void set_func_args_isSet(boolean value) {
- if (!value) {
- this.func_args = null;
- }
- }
-
- public String get_request_id() {
- return this.request_id;
- }
-
- public void set_request_id(String request_id) {
- this.request_id = request_id;
- }
-
- public void unset_request_id() {
- this.request_id = null;
- }
-
- /** Returns true if field request_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_request_id() {
- return this.request_id != null;
- }
-
- public void set_request_id_isSet(boolean value) {
- if (!value) {
- this.request_id = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case FUNC_ARGS:
- if (value == null) {
- unset_func_args();
- } else {
- set_func_args((String)value);
- }
- break;
-
- case REQUEST_ID:
- if (value == null) {
- unset_request_id();
- } else {
- set_request_id((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case FUNC_ARGS:
- return get_func_args();
-
- case REQUEST_ID:
- return get_request_id();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case FUNC_ARGS:
- return is_set_func_args();
- case REQUEST_ID:
- return is_set_request_id();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof DRPCRequest)
- return this.equals((DRPCRequest)that);
- return false;
- }
-
- public boolean equals(DRPCRequest that) {
- if (that == null)
- return false;
-
- boolean this_present_func_args = true && this.is_set_func_args();
- boolean that_present_func_args = true && that.is_set_func_args();
- if (this_present_func_args || that_present_func_args) {
- if (!(this_present_func_args && that_present_func_args))
- return false;
- if (!this.func_args.equals(that.func_args))
- return false;
- }
-
- boolean this_present_request_id = true && this.is_set_request_id();
- boolean that_present_request_id = true && that.is_set_request_id();
- if (this_present_request_id || that_present_request_id) {
- if (!(this_present_request_id && that_present_request_id))
- return false;
- if (!this.request_id.equals(that.request_id))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_func_args = true && (is_set_func_args());
- builder.append(present_func_args);
- if (present_func_args)
- builder.append(func_args);
-
- boolean present_request_id = true && (is_set_request_id());
- builder.append(present_request_id);
- if (present_request_id)
- builder.append(request_id);
-
- return builder.toHashCode();
- }
-
- public int compareTo(DRPCRequest other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- DRPCRequest typedOther = (DRPCRequest)other;
-
- lastComparison = Boolean.valueOf(is_set_func_args()).compareTo(typedOther.is_set_func_args());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_func_args()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.func_args, typedOther.func_args);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_request_id()).compareTo(typedOther.is_set_request_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_request_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.request_id, typedOther.request_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // FUNC_ARGS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.func_args = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // REQUEST_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.request_id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.func_args != null) {
- oprot.writeFieldBegin(FUNC_ARGS_FIELD_DESC);
- oprot.writeString(this.func_args);
- oprot.writeFieldEnd();
- }
- if (this.request_id != null) {
- oprot.writeFieldBegin(REQUEST_ID_FIELD_DESC);
- oprot.writeString(this.request_id);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("DRPCRequest(");
- boolean first = true;
-
- sb.append("func_args:");
- if (this.func_args == null) {
- sb.append("null");
- } else {
- sb.append(this.func_args);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("request_id:");
- if (this.request_id == null) {
- sb.append("null");
- } else {
- sb.append(this.request_id);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_func_args()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'func_args' is unset! Struct:" + toString());
- }
-
- if (!is_set_request_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'request_id' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
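DRPCRequest pairs the function arguments with a request id, both required, so a worker can fetch a request, compute, and report the result back under the same id. A sketch against the invocations interface (the invocations handle and the "reach" function are assumptions, not part of the diff):

    DRPCRequest req = invocations.fetchRequest("reach");
    String args = req.get_func_args();
    String id   = req.get_request_id();
    // ... run the topology logic on args ...
    invocations.result(id, computedAnswer);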
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/JStormUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/JStormUtils.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/JStormUtils.java
deleted file mode 100644
index 17b28cf..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/JStormUtils.java
+++ /dev/null
@@ -1,1076 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.BufferedOutputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.lang.management.ManagementFactory;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeMap;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipFile;
-
-import javax.management.ObjectName;
-
-import org.apache.commons.exec.CommandLine;
-import org.apache.commons.exec.DefaultExecutor;
-import org.apache.commons.exec.ExecuteException;
-import org.apache.commons.exec.ExecuteResultHandler;
-import org.apache.commons.exec.PumpStreamHandler;
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Appender;
-import org.apache.log4j.FileAppender;
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.callback.AsyncLoopDefaultKill;
-import com.alibaba.jstorm.callback.RunnableCallback;
-import com.alibaba.jstorm.client.ConfigExtension;
-
-/**
- * JStorm utility
- *
- * @author yannian/Longda/Xin.Zhou/Xin.Li
- *
- */
-public class JStormUtils {
- private static final Logger LOG = Logger.getLogger(JStormUtils.class);
-
- public static long SIZE_1_K = 1024;
- public static long SIZE_1_M = SIZE_1_K * 1024;
- public static long SIZE_1_G = SIZE_1_M * 1024;
- public static long SIZE_1_T = SIZE_1_G * 1024;
- public static long SIZE_1_P = SIZE_1_T * 1024;
-
- public static final int MIN_1 = 60;
- public static final int MIN_30 = MIN_1 * 30;
- public static final int HOUR_1 = MIN_30 * 2;
- public static final int DAY_1 = HOUR_1 * 24;
-
- public static final String osName = System.getProperty("os.name");
-
- public static String getErrorInfo(String baseInfo, Exception e) {
- try {
- StringWriter sw = new StringWriter();
- PrintWriter pw = new PrintWriter(sw);
- e.printStackTrace(pw);
- return baseInfo + "\r\n" + sw.toString() + "\r\n";
- } catch (Exception e2) {
- return baseInfo;
- }
- }
-
- public static String getErrorInfo(Throwable error) {
- try {
- StringWriter sw = new StringWriter();
- PrintWriter pw = new PrintWriter(sw);
- error.printStackTrace(pw);
- return sw.toString();
- } catch (Exception e1) {
- return "";
- }
- }
-
- /**
-  * Filter the map: return the entries of 'all' whose keys are NOT contained
-  * in 'filter'.
-  *
-  * @param filter keys to drop
-  * @param all source map
-  * @return a new map without the filtered keys
-  */
- public static <K, V> Map<K, V> select_keys_pred(Set<K> filter, Map<K, V> all) {
- Map<K, V> filterMap = new HashMap<K, V>();
-
- for (Entry<K, V> entry : all.entrySet()) {
- if (!filter.contains(entry.getKey())) {
- filterMap.put(entry.getKey(), entry.getValue());
- }
- }
-
- return filterMap;
- }
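// Illustrative behaviour, derived from the loop above (Set.of/Map.of used only for brevity):
//   select_keys_pred(Set.of("a"), Map.of("a", 1, "b", 2))  ->  {b=2}
// i.e. keys listed in 'filter' are dropped, everything else is kept.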
-
- public static byte[] barr(byte v) {
- byte[] byteArray = new byte[1];
- byteArray[0] = v;
-
- return byteArray;
- }
-
- public static byte[] barr(Short v) {
- byte[] byteArray = new byte[Short.SIZE / 8];
- for (int i = 0; i < byteArray.length; i++) {
- int off = (byteArray.length - 1 - i) * 8;
- byteArray[i] = (byte) ((v >> off) & 0xFF);
- }
- return byteArray;
- }
-
- public static byte[] barr(Integer v) {
- byte[] byteArray = new byte[Integer.SIZE / 8];
- for (int i = 0; i < byteArray.length; i++) {
- int off = (byteArray.length - 1 - i) * 8;
- byteArray[i] = (byte) ((v >> off) & 0xFF);
- }
- return byteArray;
- }
-
- // for test
- public static int byteToInt2(byte[] b) {
-
- int iOutcome = 0;
- byte bLoop;
-
- for (int i = 0; i < 4; i++) {
- bLoop = b[i];
- int off = (b.length - 1 - i) * 8;
- iOutcome += (bLoop & 0xFF) << off;
-
- }
-
- return iOutcome;
- }
-
- /**
-  * The localMode flag isn't clean; it makes JStormUtils ugly.
- */
- public static boolean localMode = false;
-
- public static boolean isLocalMode() {
- return localMode;
- }
-
- public static void setLocalMode(boolean localMode) {
- JStormUtils.localMode = localMode;
- }
-
- public static void haltProcess(int val) {
- Runtime.getRuntime().halt(val);
- }
-
- public static void halt_process(int val, String msg) {
- LOG.info("Halting process: " + msg);
- try {
- Thread.sleep(1000);
- } catch (InterruptedException e) {
- }
- if (localMode && val == 0) {
- //throw new RuntimeException(msg);
- }else {
- haltProcess(val);
- }
- }
-
- /**
- * "{:a 1 :b 1 :c 2} -> {1 [:a :b] 2 :c}"
- *
- * @param map
- * @return
- */
- public static <K, V> HashMap<V, List<K>> reverse_map(Map<K, V> map) {
- HashMap<V, List<K>> rtn = new HashMap<V, List<K>>();
- if (map == null) {
- return rtn;
- }
- for (Entry<K, V> entry : map.entrySet()) {
- K key = entry.getKey();
- V val = entry.getValue();
- List<K> list = rtn.get(val);
- if (list == null) {
- list = new ArrayList<K>();
- rtn.put(entry.getValue(), list);
- }
- list.add(key);
-
- }
-
- return rtn;
- }
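// Illustrative behaviour: reverse_map({a=1, b=1, c=2}) -> {1=[a, b], 2=[c]},
// grouping the original keys by their value.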
-
- /**
- * Gets the pid of this JVM, because Java doesn't provide a real way to do
- * this.
- *
- * @return
- */
- public static String process_pid() {
- String name = ManagementFactory.getRuntimeMXBean().getName();
- String[] split = name.split("@");
- if (split.length != 2) {
- throw new RuntimeException("Got unexpected process name: " + name);
- }
-
- return split[0];
- }
-
- public static void exec_command(String command) throws ExecuteException,
- IOException {
- String[] cmdlist = command.split(" ");
- CommandLine cmd = new CommandLine(cmdlist[0]);
- for (int i = 1; i < cmdlist.length; i++) {
- cmd.addArgument(cmdlist[i]);
- }
-
- DefaultExecutor exec = new DefaultExecutor();
- exec.execute(cmd);
- }
-
- /**
-  * Extract a directory from the jar into destdir.
- *
- * @param jarpath
- * @param dir
- * @param destdir
- */
- public static void extract_dir_from_jar(String jarpath, String dir,
- String destdir) {
- String cmd = "unzip -qq " + jarpath + " " + dir + "/** -d " + destdir;
- try {
- exec_command(cmd);
- } catch (Exception e) {
- LOG.warn("No " + dir + " from " + jarpath + " by cmd:" + cmd + "!\n"
- + e.getMessage());
- }
-
- }
-
- public static void ensure_process_killed(Integer pid) {
- // this function kills the process 5 times
- // to make sure the process is really killed
- for (int i = 0; i < 5; i++) {
- try {
- exec_command("kill -9 " + pid);
- LOG.info("kill -9 process " + pid);
- sleepMs(100);
- } catch (ExecuteException e) {
- LOG.info("Error when trying to kill " + pid
- + ". Process has been killed");
- } catch (Exception e) {
- LOG.info("Error when trying to kill " + pid + ".Exception ", e);
- }
- }
- }
-
- public static void process_killed(Integer pid) {
- try {
- exec_command("kill " + pid);
- LOG.info("kill process " + pid);
- } catch (ExecuteException e) {
- LOG.info("Error when trying to kill " + pid
- + ". Process has been killed. ");
- } catch (Exception e) {
- LOG.info("Error when trying to kill " + pid + ".Exception ", e);
- }
- }
-
- public static void kill(Integer pid) {
- process_killed(pid);
-
- sleepMs(5 * 1000);
-
- ensure_process_killed(pid);
- }
-
- public static void kill_signal(Integer pid, String signal) {
- String cmd = "kill " + signal + " " + pid;
- try {
- exec_command(cmd);
- LOG.info(cmd);
- } catch (ExecuteException e) {
- LOG.info("Error when run " + cmd + ". Process has been killed. ");
- } catch (Exception e) {
- LOG.info("Error when run " + cmd + ". Exception ", e);
- }
- }
-
- /**
- * This function is only for linux
- *
- * @param pid
- * @return
- */
- public static boolean isProcDead(String pid) {
- if (osName.equalsIgnoreCase("Linux") == false) {
- return false;
- }
-
- String path = "/proc/" + pid;
- File file = new File(path);
-
- if (file.exists() == false) {
- LOG.info("Process " + pid + " is dead");
- return true;
- }
-
- return false;
- }
-
- /**
-  * For background execution, pass a resultHandler such as DefaultExecuteResultHandler.
-  * For foreground execution, read the result from ByteArrayOutputStream.toString().
-  *
-  * This function does not care whether the command succeeds or not.
- *
- * @param command
- * @param environment
- * @param workDir
- * @param resultHandler
- * @return
- * @throws IOException
- */
- public static ByteArrayOutputStream launchProcess(String command, final Map environment,
- final String workDir, ExecuteResultHandler resultHandler)
- throws IOException {
-
- String[] cmdlist = command.split(" ");
-
- CommandLine cmd = new CommandLine(cmdlist[0]);
- for (String cmdItem : cmdlist) {
- if (StringUtils.isBlank(cmdItem) == false) {
- cmd.addArgument(cmdItem);
- }
- }
-
- DefaultExecutor executor = new DefaultExecutor();
-
- executor.setExitValue(0);
- if (StringUtils.isBlank(workDir) == false) {
- executor.setWorkingDirectory(new File(workDir));
- }
-
- ByteArrayOutputStream out = new ByteArrayOutputStream();
-
- PumpStreamHandler streamHandler = new PumpStreamHandler(out, out);
- if (streamHandler != null) {
- executor.setStreamHandler(streamHandler);
- }
-
- try {
- if (resultHandler == null) {
- executor.execute(cmd, environment);
- } else {
- executor.execute(cmd, environment, resultHandler);
- }
- }catch(ExecuteException e) {
-
- // @@@@
- // failed to run command
- }
-
- return out;
-
- }
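// Usage sketch (arguments are illustrative): a foreground call reads the combined
// stdout/stderr from the returned stream, a background call passes a commons-exec handler.
//
//   ByteArrayOutputStream out = JStormUtils.launchProcess("ls -l", new HashMap(), null, null);
//   System.out.println(out.toString());
//
//   DefaultExecuteResultHandler handler = new DefaultExecuteResultHandler();
//   JStormUtils.launchProcess("tail -f nimbus.log", new HashMap(), "/tmp", handler);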
-
- protected static java.lang.Process launchProcess(final String[] cmdlist,
- final Map<String, String> environment) throws IOException {
- ArrayList<String> buff = new ArrayList<String>();
- for (String tok : cmdlist) {
- if (!tok.isEmpty()) {
- buff.add(tok);
- }
- }
-
- ProcessBuilder builder = new ProcessBuilder(buff);
- builder.redirectErrorStream(true);
- Map<String, String> process_evn = builder.environment();
- for (Entry<String, String> entry : environment.entrySet()) {
- process_evn.put(entry.getKey(), entry.getValue());
- }
-
- return builder.start();
- }
-
- /**
-  * @@@ This should use DefaultExecutor to start the process, but a few small
-  * problems were found (exit code / output string handling), so the old
-  * ProcessBuilder-based method is still used.
- *
- * @param command
- * @param environment
- * @param backend
- * @return
- * @throws IOException
- */
- public static java.lang.Process launch_process(final String command,
- final Map<String, String> environment, boolean backend) throws IOException {
-
- if (backend == true) {
- new Thread(new Runnable() {
-
- @Override
- public void run() {
- String[] cmdlist = (new String("nohup " + command + " &")).split(" ");
- try {
- launchProcess(cmdlist, environment);
- } catch (IOException e) {
- LOG.error("Failed to run " + command + ":" + e.getCause(), e);
- }
- }
- }).start();
- return null;
- }else {
- String[] cmdlist = command.split(" ");
- return launchProcess(cmdlist, environment);
- }
- }
-
- public static String current_classpath() {
- return System.getProperty("java.class.path");
- }
-
- // public static String add_to_classpath(String classpath, String[] paths) {
- // for (String path : paths) {
- // classpath += ":" + path;
- // }
- // return classpath;
- // }
-
- public static String to_json(Map m) {
- return Utils.to_json(m);
- }
-
- public static Object from_json(String json) {
- return Utils.from_json(json);
- }
-
- public static <V> HashMap<V, Integer> multi_set(List<V> list) {
- HashMap<V, Integer> rtn = new HashMap<V, Integer>();
- for (V v : list) {
- int cnt = 1;
- if (rtn.containsKey(v)) {
- cnt += rtn.get(v);
- }
- rtn.put(v, cnt);
- }
- return rtn;
- }
-
- /**
- * If the list contains duplicate strings, return the duplicated strings.
- *
- * This function is used to check whether bolts or spouts share the same id.
- *
- * @param list list of component ids
- * @return the ids that appear more than once
- */
- public static List<String> getRepeat(List<String> list) {
-
- List<String> rtn = new ArrayList<String>();
- Set<String> idSet = new HashSet<String>();
-
- for (String id : list) {
- if (idSet.contains(id)) {
- rtn.add(id);
- } else {
- idSet.add(id);
- }
- }
-
- return rtn;
- }
-
- /**
- * Interleave the elements of the sub-lists so the result is balanced across them.
- *
- * @param <T> element type
- * @param splitup lists to interleave
- * @return interleaved list
- */
- public static <T> List<T> interleave_all(List<List<T>> splitup) {
- ArrayList<T> rtn = new ArrayList<T>();
- int maxLength = 0;
- for (List<T> e : splitup) {
- int len = e.size();
- if (maxLength < len) {
- maxLength = len;
- }
- }
-
- for (int i = 0; i < maxLength; i++) {
- for (List<T> e : splitup) {
- if (e.size() > i) {
- rtn.add(e.get(i));
- }
- }
- }
-
- return rtn;
- }
-
- public static Long bit_xor_vals(Object... vals) {
- Long rtn = 0l;
- for (Object n : vals) {
- rtn = bit_xor(rtn, n);
- }
-
- return rtn;
- }
-
- public static <T> Long bit_xor_vals(java.util.List<T> vals) {
- Long rtn = 0l;
- for (T n : vals) {
- rtn = bit_xor(rtn, n);
- }
-
- return rtn;
- }
-
- public static <T> Long bit_xor_vals_sets(java.util.Set<T> vals) {
- Long rtn = 0l;
- for (T n : vals) {
- rtn = bit_xor(rtn, n);
- }
- return rtn;
- }
-
- public static Long bit_xor(Object a, Object b) {
- Long rtn = 0l;
-
- if (a instanceof Long && b instanceof Long) {
- rtn = ((Long) a) ^ ((Long) b);
- return rtn;
- } else if (b instanceof Set) {
- Long bs = bit_xor_vals_sets((Set) b);
- return bit_xor(a, bs);
- } else if (a instanceof Set) {
- Long as = bit_xor_vals_sets((Set) a);
- return bit_xor(as, b);
- } else {
- Long ai = Long.parseLong(String.valueOf(a));
- Long bi = Long.parseLong(String.valueOf(b));
- rtn = ai ^ bi;
- return rtn;
- }
-
- }
-
- public static <V> List<V> mk_list(V... args) {
- ArrayList<V> rtn = new ArrayList<V>();
- for (V o : args) {
- rtn.add(o);
- }
- return rtn;
- }
-
- public static <V> List<V> mk_list(java.util.Set<V> args) {
- ArrayList<V> rtn = new ArrayList<V>();
- if (args != null) {
- for (V o : args) {
- rtn.add(o);
- }
- }
- return rtn;
- }
-
- public static <V> V[] mk_arr(V... args) {
- return args;
- }
-
- public static Long parseLong(Object o) {
- if (o == null) {
- return null;
- }
-
- if (o instanceof String) {
- return Long.valueOf(String.valueOf(o));
- } else if (o instanceof Integer) {
- Integer value = (Integer) o;
- return Long.valueOf((Integer) value);
- } else if (o instanceof Long) {
- return (Long) o;
- } else {
- throw new RuntimeException("Invalid value "
- + o.getClass().getName() + " " + o);
- }
- }
-
- public static Double parseDouble(Object o) {
- if (o == null) {
- return null;
- }
-
- if (o instanceof String) {
- return Double.valueOf(String.valueOf(o));
- } else if (o instanceof Integer) {
- Number value = (Integer) o;
- return value.doubleValue();
- } else if (o instanceof Long) {
- Number value = (Long) o;
- return value.doubleValue();
- } else if (o instanceof Double) {
- return (Double) o;
- } else {
- throw new RuntimeException("Invalid value "
- + o.getClass().getName() + " " + o);
- }
- }
-
- public static Long parseLong(Object o, long defaultValue) {
-
- if (o == null) {
- return defaultValue;
- }
-
- if (o instanceof String) {
- return Long.valueOf(String.valueOf(o));
- } else if (o instanceof Integer) {
- Integer value = (Integer) o;
- return Long.valueOf((Integer) value);
- } else if (o instanceof Long) {
- return (Long) o;
- } else {
- return defaultValue;
- }
- }
-
- public static Integer parseInt(Object o) {
- if (o == null) {
- return null;
- }
-
- if (o instanceof String) {
- return Integer.parseInt(String.valueOf(o));
- } else if (o instanceof Long) {
- long value = (Long) o;
- return Integer.valueOf((int) value);
- } else if (o instanceof Integer) {
- return (Integer) o;
- } else {
- throw new RuntimeException("Invalid value "
- + o.getClass().getName() + " " + o);
- }
- }
-
- public static Integer parseInt(Object o, int defaultValue) {
- if (o == null) {
- return defaultValue;
- }
-
- if (o instanceof String) {
- return Integer.parseInt(String.valueOf(o));
- } else if (o instanceof Long) {
- long value = (Long) o;
- return Integer.valueOf((int) value);
- } else if (o instanceof Integer) {
- return (Integer) o;
- } else {
- return defaultValue;
- }
- }
-
- public static boolean parseBoolean(Object o, boolean defaultValue) {
- if (o == null) {
- return defaultValue;
- }
-
- if (o instanceof String) {
- return Boolean.valueOf((String) o);
- } else if (o instanceof Boolean) {
- return (Boolean) o;
- } else {
- return defaultValue;
- }
- }
-
- public static <V> Set<V> listToSet(List<V> list) {
- if (list == null) {
- return null;
- }
-
- Set<V> set = new HashSet<V>();
- set.addAll(list);
- return set;
- }
-
- /**
- * Check whether the zip file contains the given resource directory.
- *
- * @param zipfile path to the zip file
- * @param resources name of the resource directory
- * @return true if an entry under "resources/" exists
- */
- public static boolean zipContainsDir(String zipfile, String resources) {
-
- Enumeration<? extends ZipEntry> entries = null;
- try {
- entries = (new ZipFile(zipfile)).entries();
- while (entries != null && entries.hasMoreElements()) {
- ZipEntry ze = entries.nextElement();
- String name = ze.getName();
- if (name.startsWith(resources + "/")) {
- return true;
- }
- }
- } catch (IOException e) {
- LOG.error("zipContainsDir error", e);
- }
-
- return false;
- }
-
- public static Object add(Object oldValue, Object newValue) {
- if (oldValue == null) {
- return newValue;
- }
-
- if (oldValue instanceof Long) {
- if (newValue == null) {
- return (Long) oldValue;
- } else {
- return (Long) oldValue + (Long) newValue;
- }
- } else if (oldValue instanceof Double) {
- if (newValue == null) {
- return (Double) oldValue;
- } else {
- return (Double) oldValue + (Double) newValue;
- }
- } else {
- return null;
- }
- }
-
- public static Object mergeList(List<Object> list) {
- Object ret = null;
-
- for (Object value : list) {
- ret = add(ret, value);
- }
-
- return ret;
- }
-
- public static List<Object> mergeList(List<Object> result, Object add) {
- if (add instanceof Collection) {
- // a Set is also a Collection, so this branch covers both cases
- for (Object o : (Collection) add) {
- result.add(o);
- }
- } else {
- result.add(add);
- }
-
- return result;
- }
-
- public static List<Object> distinctList(List<Object> input) {
- List<Object> retList = new ArrayList<Object>();
-
- for (Object object : input) {
- if (retList.contains(object)) {
- continue;
- } else {
- retList.add(object);
- }
-
- }
-
- return retList;
- }
-
- public static <K, V> Map<K, V> mergeMapList(List<Map<K, V>> list) {
- Map<K, V> ret = new HashMap<K, V>();
-
- for (Map<K, V> listEntry : list) {
- if (listEntry == null) {
- continue;
- }
- for (Entry<K, V> mapEntry : listEntry.entrySet()) {
- K key = mapEntry.getKey();
- V value = mapEntry.getValue();
-
- V retValue = (V) add(ret.get(key), value);
-
- ret.put(key, retValue);
- }
- }
-
- return ret;
- }
-
- public static String formatSimpleDouble(Double value) {
- try {
- java.text.DecimalFormat form = new java.text.DecimalFormat(
- "##0.000");
- String s = form.format(value);
- return s;
- } catch (Exception e) {
- return "0.000";
- }
-
- }
-
- public static double formatDoubleDecPoint2(Double value) {
- try {
- java.text.DecimalFormat form = new java.text.DecimalFormat(
- "##.00");
- String s = form.format(value);
- return Double.valueOf(s);
- } catch (Exception e) {
- return 0.0;
- }
- }
-
- public static double formatDoubleDecPoint4(Double value) {
- try {
- java.text.DecimalFormat form = new java.text.DecimalFormat(
- "###.0000");
- String s = form.format(value);
- return Double.valueOf(s);
- } catch (Exception e) {
- return 0.0;
- }
- }
-
- public static Double convertToDouble(Object value) {
- Double ret;
-
- if (value == null) {
- ret = null;
- } else {
- if (value instanceof Integer) {
- ret = ((Integer) value).doubleValue();
- } else if (value instanceof Long) {
- ret = ((Long) value).doubleValue();
- } else if (value instanceof Float) {
- ret = ((Float) value).doubleValue();
- } else if (value instanceof Double) {
- ret = (Double) value;
- } else {
- ret = null;
- }
- }
-
- return ret;
- }
-
- public static String formatValue(Object value) {
- if (value == null) {
- return "0";
- }
-
- if (value instanceof Long) {
- return String.valueOf((Long) value);
- } else if (value instanceof Double) {
- return formatSimpleDouble((Double) value);
- } else {
- return String.valueOf(value);
- }
- }
-
- public static void sleepMs(long ms) {
- try {
- Thread.sleep(ms);
- } catch (InterruptedException e) {
-
- }
- }
-
- public static void sleepNs(int ns) {
- try {
- Thread.sleep(0, ns);
- } catch (InterruptedException e) {
-
- }
- }
-
- public static String HEXES = "0123456789ABCDEF";
-
- public static String toPrintableString(byte[] buf) {
- if (buf == null) {
- return null;
- }
-
- StringBuilder sb = new StringBuilder();
- int index = 0;
- for (byte b : buf) {
- if (index % 10 == 0) {
- sb.append("\n");
- }
- index++;
-
- sb.append(HEXES.charAt((b & 0xF0) >> 4));
- sb.append(HEXES.charAt((b & 0x0F)));
- sb.append(" ");
-
- }
-
- return sb.toString();
- }
-
- /**
- * @@@ Todo
- *
- * @return total physical memory size in bytes, or null if it cannot be read
- */
- public static Long getPhysicMemorySize() {
- Object object;
- try {
- object = ManagementFactory.getPlatformMBeanServer().getAttribute(
- new ObjectName("java.lang", "type", "OperatingSystem"),
- "TotalPhysicalMemorySize");
- } catch (Exception e) {
- LOG.warn("Failed to get system physical memory size,", e);
- return null;
- }
-
- Long ret = (Long) object;
-
- return ret;
- }
-
- public static String genLogName(String topology, Integer port) {
- return topology + "-worker-" + port + ".log";
- }
-
- public static String getLogFileName() {
- Enumeration<Appender> enumAppender = Logger.getRootLogger()
- .getAllAppenders();
- FileAppender fileAppender = null;
- while (enumAppender.hasMoreElements()) {
- Appender appender = enumAppender.nextElement();
- if (appender instanceof FileAppender) {
- fileAppender = (FileAppender) appender;
- break;
- }
- }
- if (fileAppender != null) {
- return fileAppender.getFile();
-
- }
-
- return null;
- }
-
- public static String getLogDir() {
- String file = JStormUtils.getLogFileName();
- if (file != null) {
- if (file.lastIndexOf(File.separator) < 0)
- return "";
- return file.substring(0, file.lastIndexOf(File.separator));
- }
-
- String stormHome = System.getProperty("jstorm.home");
- if (stormHome == null) {
- return "." + File.separator + "logs";
- } else {
- return stormHome + File.separator + "logs";
- }
- }
-
- public static void redirectOutput(String file) throws Exception {
-
- System.out.println("Redirect output to " + file);
-
- FileOutputStream workerOut = new FileOutputStream(new File(file));
-
- PrintStream ps = new PrintStream(new BufferedOutputStream(workerOut),
- true);
- System.setOut(ps);
- System.setErr(ps);
-
- LOG.info("Successfully redirect System.out to " + file);
-
- }
-
- public static RunnableCallback getDefaultKillfn() {
-
- return new AsyncLoopDefaultKill();
- }
-
- public static TreeMap<Integer, Integer> integer_divided(int sum,
- int num_pieces) {
- return Utils.integerDivided(sum, num_pieces);
- }
-
- public static <K, V> HashMap<K, V> filter_val(RunnableCallback fn,
- Map<K, V> amap) {
- HashMap<K, V> rtn = new HashMap<K, V>();
-
- for (Entry<K, V> entry : amap.entrySet()) {
- V value = entry.getValue();
- Object result = fn.execute(value);
-
- if (Boolean.TRUE.equals(result)) {
- rtn.put(entry.getKey(), value);
- }
- }
- return rtn;
- }
-
- public static List<Integer> getSupervisorPortList(Map conf) {
- List<Integer> portList = (List<Integer>) conf
- .get(Config.SUPERVISOR_SLOTS_PORTS);
- if (portList != null && portList.size() > 0) {
- return portList;
- }
-
- LOG.info("Generate port list through CPU cores and system memory size");
-
- double cpuWeight = ConfigExtension.getSupervisorSlotsPortCpuWeight(conf);
- int sysCpuNum = 4;
- try {
- sysCpuNum = Runtime.getRuntime().availableProcessors();
- }catch(Exception e) {
- LOG.info("Failed to get CPU cores, set cpu cores as 4");
- sysCpuNum = 4;
- }
- int cpuPortNum = (int)(sysCpuNum/cpuWeight);
- if (cpuPortNum < 1) {
-
- LOG.info("Invalid supervisor.slots.port.cpu.weight setting :"
- + cpuWeight + ", cpu cores:" + sysCpuNum);
- cpuPortNum = 1;
- }
-
- int memPortNum = Integer.MAX_VALUE;
- Long physicalMemSize = JStormUtils.getPhysicMemorySize();
- if (physicalMemSize == null) {
- LOG.info("Failed to get memory size");
- }else {
- LOG.info("Get system memory size :" + physicalMemSize);
- long workerMemSize = ConfigExtension.getMemSizePerWorker(conf);
- memPortNum = (int)(physicalMemSize/workerMemSize);
- if (memPortNum < 1) {
- LOG.info("Invalide worker.memory.size setting:" + workerMemSize );
- memPortNum = 4;
- }else if (memPortNum < 4){
- LOG.info("System memory is too small for jstorm");
- memPortNum = 4;
- }
- }
-
- int portNum = Math.min(cpuPortNum, memPortNum);
- if (portNum < 1) {
- portNum = 1;
- }
-
- int portBase = ConfigExtension.getSupervisorSlotsPortsBase(conf);
- portList = new ArrayList<Integer>();
- for(int i = 0; i < portNum; i++) {
- portList.add(portBase + i);
- }
-
- return portList;
- }
-}
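For reference, a minimal sketch of how the interleave_all helper above balances elements across sub-lists. This is a hypothetical standalone class; it assumes the deleted com.alibaba.jstorm.utils.JStormUtils is still on the classpath.

import java.util.Arrays;
import java.util.List;

import com.alibaba.jstorm.utils.JStormUtils;

public class InterleaveDemo {
    public static void main(String[] args) {
        List<List<Integer>> splitup = Arrays.asList(
                Arrays.asList(1, 2, 3),
                Arrays.asList(4, 5),
                Arrays.asList(6));
        // One element is taken from each sub-list per round, so the result
        // is balanced across the inputs: [1, 4, 6, 2, 5, 3]
        List<Integer> balanced = JStormUtils.interleave_all(splitup);
        System.out.println(balanced);
    }
}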
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/NetWorkUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/NetWorkUtils.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/NetWorkUtils.java
deleted file mode 100644
index 1a6c649..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/NetWorkUtils.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.ServerSocket;
-import java.net.UnknownHostException;
-import java.security.InvalidParameterException;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.log4j.Logger;
-
-/**
- * Network utils
- *
- * @author yannian
- *
- */
-public class NetWorkUtils {
- private static Logger LOG = Logger.getLogger(NetWorkUtils.class);
-
- public static String hostname() {
- String hostname = null;
- try {
- hostname = InetAddress.getLocalHost().getCanonicalHostName();
- } catch (UnknownHostException e) {
- LOG.error("local_hostname", e);
- }
- return hostname;
- }
-
- public static String ip() {
- String hostname = null;
- try {
- hostname = InetAddress.getLocalHost().getHostAddress();
- } catch (UnknownHostException e) {
- LOG.error("local_hostname", e);
- }
- return hostname;
- }
-
- /**
- * Try to bind the given port (0 means any free port).
- *
- * @param port port to try
- * @return the port that was actually bound
- * @throws IOException if the port cannot be bound
- */
- public static int tryPort(int port) throws IOException {
- ServerSocket socket = new ServerSocket(port);
- int rtn = socket.getLocalPort();
- socket.close();
- return rtn;
- }
-
- /**
- * get one available port
- *
- * @return -1 means failed, others means one availablePort
- */
- public static int getAvailablePort() {
- return availablePort(0);
- }
-
- /**
- * Check whether the preferred port is available for binding.
- *
- * @param prefered preferred port (0 means any free port)
- * @return -1 means not available, others means available
- */
- public static int availablePort(int prefered) {
- int rtn = -1;
- try {
- rtn = tryPort(prefered);
- } catch (IOException e) {
-
- }
- return rtn;
- }
-
- public static String host2Ip(String host) {
- InetAddress address = null;
- try {
- address = InetAddress.getByName(host);
- } catch (UnknownHostException e) {
- LOG.warn("NetWorkUtil can't transfer hostname(" + host + ") to ip, return hostname", e);
- return host;
- }
- return address.getHostAddress();
- }
-
- public static String ip2Host(String ip) {
- InetAddress address = null;
- try {
- address = InetAddress.getByName(ip);
- } catch (UnknownHostException e) {
- LOG.warn("NetWorkUtil can't transfer ip(" + ip + ") to hostname, return ip", e);
- return ip;
- }
- return address.getHostName();
- }
-
- public static boolean equals(String host1, String host2) {
-
-
- if (StringUtils.equalsIgnoreCase(host1, host2) == true) {
- return true;
- }
-
- if (host1 == null || host2 == null) {
- return false;
- }
-
- String ip1 = host2Ip(host1);
- String ip2 = host2Ip(host2);
-
- return StringUtils.equalsIgnoreCase(ip1, ip2);
-
- }
-
-}
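A small usage sketch for the port helpers above; this is a hypothetical class and the port number 6700 is just an example value.

import com.alibaba.jstorm.utils.NetWorkUtils;

public class PortProbe {
    public static void main(String[] args) {
        // Ask the OS for any free port (internally tries to bind port 0).
        int free = NetWorkUtils.getAvailablePort();
        System.out.println("free port: " + free);

        // Probe a specific port; -1 means it is already taken.
        int slot = NetWorkUtils.availablePort(6700);
        System.out.println(slot == -1 ? "6700 is busy" : "6700 is free");
    }
}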
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/OlderFileFilter.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/OlderFileFilter.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/OlderFileFilter.java
deleted file mode 100644
index a3063d6..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/OlderFileFilter.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.File;
-import java.io.FileFilter;
-
-/**
- * Accept only files whose last-modified time is at least the given number of
- * seconds in the past; directories are always accepted.
- *
- * @author lixin
- *
- */
-public class OlderFileFilter implements FileFilter {
-
- private int seconds;
-
- public OlderFileFilter(int seconds) {
- this.seconds = seconds;
- }
-
- @Override
- public boolean accept(File pathname) {
-
- long current_time = System.currentTimeMillis();
-
- return (pathname.isFile() && (pathname.lastModified() + seconds * 1000 <= current_time))
- || pathname.isDirectory();
- }
-
-}
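A usage sketch for the filter above; the directory path is a made-up example.

import java.io.File;

import com.alibaba.jstorm.utils.OlderFileFilter;

public class StaleFileScan {
    public static void main(String[] args) {
        // Accept files not modified for at least one hour; directories always pass.
        OlderFileFilter filter = new OlderFileFilter(3600);

        File[] candidates = new File("/tmp/jstorm-logs").listFiles(filter);  // hypothetical path
        if (candidates != null) {
            for (File f : candidates) {
                System.out.println("stale: " + f.getAbsolutePath());
            }
        }
    }
}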
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/Pair.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/Pair.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/Pair.java
deleted file mode 100644
index 43aa583..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/Pair.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-public class Pair<F, S> {
- private F first;
- private S second;
-
- public Pair(F first, S second) {
- this.first = first;
- this.second = second;
- }
-
- public F getFirst() {
- return first;
- }
-
- public void setFirst(F first) {
- this.first = first;
- }
-
- public S getSecond() {
- return second;
- }
-
- public void setSecond(S second) {
- this.second = second;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/PathUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/PathUtils.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/PathUtils.java
deleted file mode 100644
index 26cddf5..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/PathUtils.java
+++ /dev/null
@@ -1,130 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.File;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.commons.io.FileUtils;
-import org.apache.log4j.Logger;
-
-/**
- *
- * @author yannian
- *
- */
-public class PathUtils {
- static Logger LOG = Logger.getLogger(PathUtils.class);
-
- public static final String SEPERATOR = "/";
-
- /**
- * split path as list
- *
- * @param path
- * @return
- */
- public static List<String> tokenize_path(String path) {
- String[] toks = path.split(SEPERATOR);
- java.util.ArrayList<String> rtn = new ArrayList<String>();
- for (String str : toks) {
- if (!str.isEmpty()) {
- rtn.add(str);
- }
- }
- return rtn;
- }
-
- public static String toks_to_path(List<String> toks) {
- StringBuffer buff = new StringBuffer();
- buff.append(SEPERATOR);
- int size = toks.size();
- for (int i = 0; i < size; i++) {
- buff.append(toks.get(i));
- if (i < (size - 1)) {
- buff.append(SEPERATOR);
- }
-
- }
- return buff.toString();
- }
-
- public static String normalize_path(String path) {
- String rtn = toks_to_path(tokenize_path(path));
- return rtn;
- }
-
- public static String parent_path(String path) {
- List<String> toks = tokenize_path(path);
- int size = toks.size();
- if (size > 0) {
- toks.remove(size - 1);
- }
- return toks_to_path(toks);
- }
-
- public static String full_path(String parent, String name) {
- return normalize_path(parent + SEPERATOR + name);
- }
-
- public static boolean exists_file(String path) {
- return (new File(path)).exists();
- }
-
- public static void rmr(String path) throws IOException {
- LOG.debug("Rmr path " + path);
- if (exists_file(path)) {
- FileUtils.forceDelete(new File(path));
- }
-
- }
-
- public static void local_mkdirs(String path) throws IOException {
- LOG.debug("Making dirs at" + path);
- FileUtils.forceMkdir(new File(path));
- }
-
- public static void rmpath(String path) {
- LOG.debug("Removing path " + path);
- boolean isdelete = (new File(path)).delete();
- if (!isdelete) {
- throw new RuntimeException("Failed to delete " + path);
- }
- }
-
- public static void touch(String path) throws IOException {
- LOG.debug("Touching file at" + path);
- boolean success = (new File(path)).createNewFile();
- if (!success) {
- throw new RuntimeException("Failed to touch " + path);
- }
- }
-
- public static List<String> read_dir_contents(String dir) {
- ArrayList<String> rtn = new ArrayList<String>();
- if (exists_file(dir)) {
- File[] list = (new File(dir)).listFiles();
- for (File f : list) {
- rtn.add(f.getName());
- }
- }
- return rtn;
- }
-
- public static String getCanonicalPath(String fileName) {
- String ret = null;
- File file = new File(fileName);
- if (file.exists()) {
- try {
- ret = file.getCanonicalPath();
- } catch (IOException e) {
- LOG.error("", e);
- }
- }else {
- LOG.warn(fileName + " doesn't exist ");
- }
-
- return ret;
- }
-
-}
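A short sketch of how the path helpers above behave (hypothetical class).

import com.alibaba.jstorm.utils.PathUtils;

public class PathDemo {
    public static void main(String[] args) {
        // Empty segments are dropped when tokenizing.
        System.out.println(PathUtils.tokenize_path("/storm//supervisor/stormdist/"));
        // -> [storm, supervisor, stormdist]

        // normalize_path re-joins the tokens with a single "/" separator.
        System.out.println(PathUtils.normalize_path("//storm/supervisor/"));
        // -> /storm/supervisor

        // parent_path drops the last token.
        System.out.println(PathUtils.parent_path("/storm/supervisor/stormdist"));
        // -> /storm/supervisor
    }
}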
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RandomRange.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RandomRange.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RandomRange.java
deleted file mode 100644
index b06ed5f..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RandomRange.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.util.ArrayList;
-
-/**
- * Shuffled range of integers. This class is used in shuffle grouping; it is better
- * than plain random selection, which cannot guarantee an even balance.
- *
- * @author yannian
- *
- */
-public class RandomRange {
- private ArrayList<Integer> rr;
- private Integer amt;
-
- public RandomRange(int amt) {
- this.amt = amt;
- this.rr = rotating_random_range(amt);
- }
-
- public Integer nextInt() {
- return this.acquire_random_range_id();
- }
-
- private ArrayList<Integer> rotating_random_range(int amt) {
-
- ArrayList<Integer> range = new ArrayList<Integer>();
- for (int i = 0; i < amt; i++) {
- range.add(i);
- }
-
- ArrayList<Integer> rtn = new ArrayList<Integer>();
- for (int i = 0; i < amt; i++) {
- int index = (int) (Math.random() * range.size());
- rtn.add(range.remove(index));
- }
-
- return rtn;
- }
-
- private synchronized int acquire_random_range_id() {
- int ret = this.rr.remove(0);
- if (this.rr.size() == 0) {
- this.rr.addAll(rotating_random_range(this.amt));
- }
- return ret;
- }
-
- public static void main(String[] args) {
- RandomRange test = new RandomRange(10);
-
- for (int i = 0; i < 10; i++) {
- System.out.println(test.acquire_random_range_id());
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RotatingMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RotatingMap.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RotatingMap.java
deleted file mode 100644
index 78095a6..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RotatingMap.java
+++ /dev/null
@@ -1,133 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.util.Deque;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.LinkedBlockingDeque;
-
-/**
- * RotatingMap must be used in a thread-safe environment.
- *
- * Expires keys that have not been updated in the configured number of seconds.
- * The algorithm used will take between expirationSecs and expirationSecs * (1 +
- * 1 / (numBuckets-1)) to actually expire the message.
- *
- * get, put, remove, containsKey, and size take O(numBuckets) time to run.
- *
- */
-public class RotatingMap<K, V> implements TimeOutMap<K, V>{
- // this default ensures things expire at most 50% past the expiration time
- private static final int DEFAULT_NUM_BUCKETS = 3;
-
-
- private Deque<Map<K, V>> _buckets;
-
- private ExpiredCallback _callback;
-
- private final Object lock = new Object();
-
- public RotatingMap(int numBuckets, ExpiredCallback<K, V> callback, boolean isSingleThread) {
- if (numBuckets < 2) {
- throw new IllegalArgumentException("numBuckets must be >= 2");
- }
- if (isSingleThread == true) {
- _buckets = new LinkedList<Map<K, V>>();
- }else {
- _buckets = new LinkedBlockingDeque<Map<K, V>>();
- }
-
- for (int i = 0; i < numBuckets; i++) {
- _buckets.add(new ConcurrentHashMap<K, V>());
- }
-
- _callback = callback;
- }
-
- public RotatingMap(ExpiredCallback<K, V> callback) {
- this(DEFAULT_NUM_BUCKETS, callback, false);
- }
-
- public RotatingMap(int numBuckets) {
- this(numBuckets, null, false);
- }
-
- public Map<K, V> rotate() {
- Map<K, V> dead = _buckets.removeLast();
- _buckets.addFirst(new ConcurrentHashMap<K, V>());
- if (_callback != null) {
- for (Entry<K, V> entry : dead.entrySet()) {
- _callback.expire(entry.getKey(), entry.getValue());
- }
- }
- return dead;
- }
-
- @Override
- public boolean containsKey(K key) {
- for (Map<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return true;
- }
- }
- return false;
- }
-
- @Override
- public V get(K key) {
- for (Map<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.get(key);
- }
- }
- return null;
- }
-
- @Override
- public void putHead(K key, V value) {
- _buckets.peekFirst().put(key, value);
- }
-
- @Override
- public void put(K key, V value) {
- Iterator<Map<K, V>> it = _buckets.iterator();
- Map<K, V> bucket = it.next();
- bucket.put(key, value);
- while (it.hasNext()) {
- bucket = it.next();
- bucket.remove(key);
- }
- }
-
-
- /**
- * Remove the item from the rotating buckets.
- *
- * For performance, scanning from the head is faster; strictly speaking, though,
- * it should scan from the oldest bucket (the end) back to the first.
- *
- * @param key key to remove
- * @return the removed value, or null if the key is not present
- */
- @Override
- public Object remove(K key) {
- for (Map<K, V> bucket : _buckets) {
- Object value = bucket.remove(key);
- if (value != null) {
- return value;
- }
- }
- return null;
- }
-
- @Override
- public int size() {
- int size = 0;
- for (Map<K, V> bucket : _buckets) {
- size += bucket.size();
- }
- return size;
- }
-}
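A minimal sketch of the rotate-to-expire pattern above. The package of ExpiredCallback is assumed to be the same utils package; adjust the import if it lives elsewhere.

import com.alibaba.jstorm.utils.ExpiredCallback;
import com.alibaba.jstorm.utils.RotatingMap;

public class PendingTuples {
    public static void main(String[] args) {
        // 3 buckets, single-threaded caller, expiration callback prints timed-out entries.
        RotatingMap<Long, String> pending = new RotatingMap<Long, String>(
                3,
                new ExpiredCallback<Long, String>() {
                    public void expire(Long id, String tuple) {
                        System.out.println("timed out: " + id + " -> " + tuple);
                    }
                },
                true);

        pending.put(1L, "tuple-1");
        pending.rotate();
        pending.rotate();
        // After numBuckets rotations without an update, the entry reaches the
        // last bucket, is dropped, and the callback fires.
        pending.rotate();
    }
}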
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RunCounter.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RunCounter.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RunCounter.java
deleted file mode 100644
index 69a1bb8..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/RunCounter.java
+++ /dev/null
@@ -1,85 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.Serializable;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.log4j.Logger;
-
-public class RunCounter implements Serializable{
-
- private static final long serialVersionUID = 2177944366059817622L;
- private static final Logger LOG = Logger.getLogger(RunCounter.class);
- private AtomicLong total = new AtomicLong(0);
- private AtomicLong times = new AtomicLong(0);
- private AtomicLong values = new AtomicLong(0);
-
- private IntervalCheck intervalCheck;
-
- private final String id;
-
-
- public RunCounter() {
- this("", RunCounter.class);
- }
-
- public RunCounter(String id) {
- this(id, RunCounter.class);
- }
-
- public RunCounter(Class tclass) {
- this(tclass.getName(), tclass);
-
- }
-
- public RunCounter(String id, Class tclass) {
- this.id = id;
-
- intervalCheck = new IntervalCheck();
- intervalCheck.setInterval(60);
- }
-
- public Double count(long value) {
- long totalValue = total.incrementAndGet();
- long timesValue = times.incrementAndGet();
- long v = values.addAndGet(value);
-
- Double pass = intervalCheck.checkAndGet();
- if (pass != null) {
- times.set(0);
- values.set(0);
-
- Double tps = timesValue / pass;
-
- StringBuilder sb = new StringBuilder();
- sb.append(id);
- sb.append(", tps:" + tps);
- sb.append(", avg:" + ((double) v) / timesValue);
- sb.append(", total:" + totalValue);
- LOG.info(sb.toString());
-
- return tps;
- }
-
- return null;
- }
-
- public void cleanup() {
-
- LOG.info(id + ", total:" + total);
- }
-
-
-
- public IntervalCheck getIntervalCheck() {
- return intervalCheck;
- }
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
-
- }
-
-}
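A usage sketch for RunCounter (hypothetical; note that the tps/avg line is only logged once the internal 60-second interval check passes).

import com.alibaba.jstorm.utils.RunCounter;

public class LatencyStats {
    public static void main(String[] args) throws Exception {
        RunCounter counter = new RunCounter("emit-latency");

        for (int i = 0; i < 1000; i++) {
            long start = System.currentTimeMillis();
            Thread.sleep(1);                          // stand-in for real work
            counter.count(System.currentTimeMillis() - start);
        }

        counter.cleanup();   // logs the accumulated total
    }
}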
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/SmartThread.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/SmartThread.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/SmartThread.java
deleted file mode 100644
index 22aa2f5..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/SmartThread.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-/**
- *
- * @author yannian
- *
- */
-public interface SmartThread {
- public void start();
-
- public void join() throws InterruptedException;
-
- public void interrupt();
-
- public Boolean isSleeping();
-
- public void cleanup();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheMap.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheMap.java
deleted file mode 100644
index 210e6c8..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheMap.java
+++ /dev/null
@@ -1,147 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Map.Entry;
-
-/**
- * Expires keys that have not been updated in the configured number of seconds.
- * The algorithm used will take between expirationSecs and expirationSecs * (1 +
- * 1 / (numBuckets-1)) to actually expire the message.
- *
- * get, put, remove, containsKey, and size take O(numBuckets) time to run.
- *
- *
- */
-public class TimeCacheMap<K, V> implements TimeOutMap<K, V> {
- // this default ensures things expire at most 50% past the expiration time
- private static final int DEFAULT_NUM_BUCKETS = 3;
-
- private LinkedList<HashMap<K, V>> _buckets;
-
- private final Object _lock = new Object();
- private Thread _cleaner;
- private ExpiredCallback _callback;
-
- public TimeCacheMap(int expirationSecs, int numBuckets,
- ExpiredCallback<K, V> callback) {
- if (numBuckets < 2) {
- throw new IllegalArgumentException("numBuckets must be >= 2");
- }
- _buckets = new LinkedList<HashMap<K, V>>();
- for (int i = 0; i < numBuckets; i++) {
- _buckets.add(new HashMap<K, V>());
- }
-
- _callback = callback;
- final long expirationMillis = expirationSecs * 1000L;
- final long sleepTime = expirationMillis / (numBuckets - 1);
- _cleaner = new Thread(new Runnable() {
- public void run() {
-
- while (true) {
- Map<K, V> dead = null;
- JStormUtils.sleepMs(sleepTime);
- synchronized (_lock) {
- dead = _buckets.removeLast();
- _buckets.addFirst(new HashMap<K, V>());
- }
- if (_callback != null) {
- for (Entry<K, V> entry : dead.entrySet()) {
- _callback.expire(entry.getKey(), entry.getValue());
- }
- }
- }
- }
- });
- _cleaner.setDaemon(true);
- _cleaner.start();
- }
-
- public TimeCacheMap(int expirationSecs, ExpiredCallback<K, V> callback) {
- this(expirationSecs, DEFAULT_NUM_BUCKETS, callback);
- }
-
- public TimeCacheMap(int expirationSecs) {
- this(expirationSecs, DEFAULT_NUM_BUCKETS);
- }
-
- public TimeCacheMap(int expirationSecs, int numBuckets) {
- this(expirationSecs, numBuckets, null);
- }
-
- @Override
- public boolean containsKey(K key) {
- synchronized (_lock) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return true;
- }
- }
- return false;
- }
- }
-
- @Override
- public V get(K key) {
- synchronized (_lock) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.get(key);
- }
- }
- return null;
- }
- }
-
- @Override
- public void putHead(K key, V value) {
- synchronized (_lock) {
- _buckets.getFirst().put(key, value);
- }
- }
-
- @Override
- public void put(K key, V value) {
- synchronized (_lock) {
- Iterator<HashMap<K, V>> it = _buckets.iterator();
- HashMap<K, V> bucket = it.next();
- bucket.put(key, value);
- while (it.hasNext()) {
- bucket = it.next();
- bucket.remove(key);
- }
- }
- }
-
- @Override
- public Object remove(K key) {
- synchronized (_lock) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.remove(key);
- }
- }
- return null;
- }
- }
-
- @Override
- public int size() {
- synchronized (_lock) {
- int size = 0;
- for (HashMap<K, V> bucket : _buckets) {
- size += bucket.size();
- }
- return size;
- }
- }
-
- public void cleanup() {
- _cleaner.interrupt();
- }
-
-
-}
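A sketch of time-based expiry with the class above. The ExpiredCallback import location is an assumption; with 3 buckets and expirationSecs = 10 an entry expires between 10 and 15 seconds after its last put.

import com.alibaba.jstorm.utils.ExpiredCallback;
import com.alibaba.jstorm.utils.TimeCacheMap;

public class HeartbeatCache {
    public static void main(String[] args) throws Exception {
        TimeCacheMap<String, Long> heartbeats = new TimeCacheMap<String, Long>(
                10,
                new ExpiredCallback<String, Long>() {
                    public void expire(String worker, Long lastSeen) {
                        System.out.println("worker lost: " + worker);
                    }
                });

        heartbeats.put("supervisor-host:6800", System.currentTimeMillis());

        Thread.sleep(20 * 1000);   // long enough for the cleaner thread to evict the entry
        System.out.println("still cached: " + heartbeats.containsKey("supervisor-host:6800"));

        heartbeats.cleanup();      // stop the background cleaner thread
    }
}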
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheQueue.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheQueue.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheQueue.java
deleted file mode 100644
index acc8221..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeCacheQueue.java
+++ /dev/null
@@ -1,168 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.concurrent.LinkedBlockingDeque;
-
-import org.apache.log4j.Logger;
-
-/**
- * Expires keys that have not been updated in the configured number of seconds.
- * The algorithm used will take between expirationSecs and expirationSecs * (1 +
- * 1 / (numBuckets-1)) to actually expire the message.
- *
- * get, put, remove, containsKey, and size take O(numBuckets) time to run.
- *
- * The advantage of this design is that the expiration thread only locks the
- * object for O(1) time, meaning the object is essentially always available for
- * poll/offer
- */
-public class TimeCacheQueue<K> {
- // this default ensures things expire at most 50% past the expiration time
- public static final int DEFAULT_NUM_BUCKETS = 3;
-
- public static interface ExpiredCallback<K> {
- public void expire(K entry);
- }
-
- public static class DefaultExpiredCallback<K> implements ExpiredCallback<K> {
- protected static final Logger LOG = Logger
- .getLogger(TimeCacheQueue.DefaultExpiredCallback.class);
-
- protected String queueName;
-
- public DefaultExpiredCallback(String queueName) {
- this.queueName = queueName;
- }
-
- public void expire(K entry) {
- LOG.info("TimeCacheQueue " + queueName + " entry:" + entry
- + ", timeout");
- }
- }
-
- protected LinkedList<LinkedBlockingDeque<K>> _buckets;
-
- protected final Object _lock = new Object();
- protected Thread _cleaner;
- protected ExpiredCallback _callback;
-
- public TimeCacheQueue(int expirationSecs, int numBuckets,
- ExpiredCallback<K> callback) {
- if (numBuckets < 2) {
- throw new IllegalArgumentException("numBuckets must be >= 2");
- }
- _buckets = new LinkedList<LinkedBlockingDeque<K>>();
- for (int i = 0; i < numBuckets; i++) {
- _buckets.add(new LinkedBlockingDeque<K>());
- }
-
- _callback = callback;
- final long expirationMillis = expirationSecs * 1000L;
- final long sleepTime = expirationMillis / (numBuckets - 1);
- _cleaner = new Thread(new Runnable() {
- public void run() {
- try {
- while (true) {
- LinkedBlockingDeque<K> dead = null;
-
- Thread.sleep(sleepTime);
-
- synchronized (_lock) {
- dead = _buckets.removeLast();
- _buckets.addFirst(new LinkedBlockingDeque<K>());
- }
- if (_callback != null) {
- for (K entry : dead) {
- _callback.expire(entry);
- }
- }
- }
- } catch (InterruptedException ex) {
-
- }
- }
- });
- _cleaner.setDaemon(true);
- _cleaner.start();
- }
-
- public TimeCacheQueue(int expirationSecs, ExpiredCallback<K> callback) {
- this(expirationSecs, DEFAULT_NUM_BUCKETS, callback);
- }
-
- public TimeCacheQueue(int expirationSecs) {
- this(expirationSecs, DEFAULT_NUM_BUCKETS, null);
- }
-
- public TimeCacheQueue(int expirationSecs, int numBuckets) {
- this(expirationSecs, numBuckets, null);
- }
-
- public boolean containsKey(K entry) {
- synchronized (_lock) {
- for (LinkedBlockingDeque<K> bucket : _buckets) {
- if (bucket.contains(entry)) {
- return true;
- }
- }
- return false;
- }
- }
-
- public K poll() {
- synchronized (_lock) {
- Iterator<LinkedBlockingDeque<K>> itor = _buckets
- .descendingIterator();
- while (itor.hasNext()) {
- LinkedBlockingDeque<K> bucket = itor.next();
- K entry = bucket.poll();
- if (entry != null) {
- return entry;
- }
- }
-
- return null;
- }
- }
-
- public void offer(K entry) {
- synchronized (_lock) {
- LinkedBlockingDeque<K> bucket = _buckets.getFirst();
-
- bucket.offer(entry);
- }
- }
-
- public void remove(K entry) {
- synchronized (_lock) {
- for (LinkedBlockingDeque<K> bucket : _buckets) {
- if (bucket.contains(entry)) {
- bucket.remove(entry);
- return;
- }
- }
- return;
- }
- }
-
- public int size() {
- synchronized (_lock) {
- int size = 0;
- for (LinkedBlockingDeque<K> bucket : _buckets) {
- size += bucket.size();
- }
- return size;
- }
- }
-
- @Override
- protected void finalize() throws Throwable {
- try {
- _cleaner.interrupt();
- } finally {
- super.finalize();
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeFormat.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeFormat.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeFormat.java
deleted file mode 100644
index b82572b..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeFormat.java
+++ /dev/null
@@ -1,199 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Date;
-
-import org.apache.log4j.Logger;
-
-/**
- *
- * @author longda
- *
- */
-public class TimeFormat {
- public static Logger log = Logger.getLogger(TimeFormat.class);
-
- public static final long ONE_SECOND_MILLISECONDS = 1000;
-
- public static final long ONE_MINUTE_SECONDS = 60;
-
- public static final long ONE_HOUR_MINUTES = 60;
-
- public static final long ONE_DAY_HOURS = 24;
-
- public static final long ONE_MINUTE_MILLISECONDS = ONE_MINUTE_SECONDS
- * ONE_SECOND_MILLISECONDS;
-
- public static final long ONE_HOUR_MILLISECONDS = ONE_HOUR_MINUTES
- * ONE_MINUTE_MILLISECONDS;
-
- public static final long ONE_DAY_MILLISECONDS = ONE_DAY_HOURS
- * ONE_HOUR_MILLISECONDS;
-
- public static Date convertDate(String dateStr, String format) {
- Date date = null;
- try {
- if (format != null) {
- SimpleDateFormat simpleDateFormat = new SimpleDateFormat(format);
- date = simpleDateFormat.parse(dateStr);
- } else {
- date = new Date(dateStr);
- }
-
- } catch (Exception ex) {
- log.error("Failed to convert " + dateStr + " to Date, format:"
- + format);
- return null;
- }
- return date;
- }
-
- public static String convertStr(Date date, String format) {
- String ret = null;
- try {
-
- SimpleDateFormat sdf = new SimpleDateFormat(format);
-
- ret = sdf.format(date);
-
- } catch (Exception e) {
- log.error("Failed to convert " + date + " to String, format:"
- + format);
- return null;
- }
- return ret;
- }
-
- public static Date getYear(String dateStr) {
- return convertDate(dateStr, "yyyy");
- }
-
- public static String getYear(Date date) {
- return convertStr(date, "yyyy");
- }
-
- public static Date getMonth(String dateStr) {
- return convertDate(dateStr, "yyyyMM");
- }
-
- public static String getMonth(Date date) {
- return convertStr(date, "yyyyMM");
- }
-
- public static Date getDay(String dateStr) {
- return convertDate(dateStr, "yyyyMMdd");
- }
-
- public static String getDay(Date date) {
- return convertStr(date, "yyyyMMdd");
- }
-
- public static Date getHour(String dateStr) {
- return convertDate(dateStr, "yyyyMMddHH");
- }
-
- public static String getHour(Date date) {
- return convertStr(date, "yyyyMMddHH");
- }
-
- public static Date getMinute(String dateStr) {
- return convertDate(dateStr, "yyyyMMddHHmm");
- }
-
- public static String getMinute(Date date) {
- return convertStr(date, "yyyyMMddHHmm");
- }
-
- public static Date getSecond(String dateStr) {
- return convertDate(dateStr, "yyyyMMddHHmmss");
- }
-
- public static String getSecond(Date date) {
- return convertStr(date, "yyyyMMddHHmmss");
- }
-
- public static String getHourMin(String dateStr) {
- Date date = convertDate(dateStr, null);
- if (date == null) {
- return null;
- }
-
- return getHourMin(date);
- }
-
- public static String getHourMin(Date date) {
- String output = null;
- try {
- SimpleDateFormat sdf = new SimpleDateFormat("HH:mm");
- output = sdf.format(date);
- } catch (Exception e) {
- return null;
- }
- return output;
- }
-
- public static Date getToday() {
- Date now = new Date();
-
- String todayStr = getDay(now);
-
- return getDay(todayStr);
- }
-
- public static Date getYesterday() {
- Date now = new Date();
-
- Calendar yesterdayCal = Calendar.getInstance();
- yesterdayCal.setTime(now);
- yesterdayCal.add(Calendar.DATE, -1);
-
- String yesterdayStr = getDay(yesterdayCal.getTime());
-
- return getDay(yesterdayStr);
- }
-
- /**
- * Get the number of days that have passed since the epoch (1970-01-01),
- * adjusted by 8 hours for the GMT+8 time zone.
- *
- * @return number of whole days since the epoch
- */
- public static long getDayNum(Date date) {
- long passMs = date.getTime() + (8 * 1000 * 60 * 60);
-
- return (passMs / 1000 / 60 / 60 / 24);
- }
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
-
- Date date = new Date();
-
- String dateStr = getDay(date);
-
- Date newDate = getDay(dateStr);
-
- System.out.println("new date:" + newDate);
-
- Date current = new Date();
- Calendar tomorrow = Calendar.getInstance();
-
- tomorrow.setTime(current);
- tomorrow.add(Calendar.DATE, 1);
- tomorrow.set(Calendar.AM_PM, Calendar.AM);
- tomorrow.set(Calendar.HOUR, 2);
- tomorrow.set(Calendar.MINUTE, 0);
- Date startTime = tomorrow.getTime();
-
- long hourdiff = (startTime.getTime() - current.getTime())
- / ONE_HOUR_MILLISECONDS;
-
- System.out.println("Current:" + current + ", tomorrow" + startTime
- + ", diff hour" + hourdiff);
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeOutMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeOutMap.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeOutMap.java
deleted file mode 100644
index 4bf2143..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeOutMap.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-
-public interface TimeOutMap<K, V> {
-
- public boolean containsKey(K key);
-
- public V get(K key);
-
- public void putHead(K key, V value);
-
- public void put(K key, V value);
-
- public Object remove(K key);
-
- public int size() ;
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeUtils.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeUtils.java
deleted file mode 100644
index 0536cee..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/TimeUtils.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import backtype.storm.utils.Time;
-
-/**
- * Time utils
- *
- * @author yannian
- *
- */
-public class TimeUtils {
-
- /**
- * Take care of int overflow
- *
- * @return
- */
- public static int current_time_secs() {
- return (int)(Time.currentTimeMillis() / 1000);
- }
-
- /**
- * Take care of int overflow
- *
- * @return
- */
- public static int time_delta(int time_secs) {
- return current_time_secs() - time_secs;
- }
-
- public static long time_delta_ms(long time_ms) {
- return System.currentTimeMillis() - time_ms;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Factory.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Factory.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Factory.java
deleted file mode 100644
index 2d203f3..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Factory.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-import java.io.IOException;
-import java.net.InetSocketAddress;
-
-import org.apache.zookeeper.server.NIOServerCnxnFactory;
-import org.apache.zookeeper.server.ZooKeeperServer;
-
-public class Factory extends NIOServerCnxnFactory {
-
- public Factory(InetSocketAddress addr, int maxcc) throws IOException {
- super();
- this.configure(addr, maxcc);
- }
-
- public ZooKeeperServer getZooKeeperServer() {
- return zkServer;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkConstant.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkConstant.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkConstant.java
deleted file mode 100644
index c07add5..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkConstant.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-public class ZkConstant {
-
- public static final String ZK_SEPERATOR = "/";
-
- public static final String ASSIGNMENTS_BAK = "assignments_bak";
-
- public static final String ASSIGNMENTS_BAK_SUBTREE;
-
- public static final String NIMBUS_SLAVE_ROOT = "nimbus_slave";
-
- public static final String NIMBUS_SLAVE_SUBTREE;
-
- static {
- ASSIGNMENTS_BAK_SUBTREE = ZK_SEPERATOR + ASSIGNMENTS_BAK;
- NIMBUS_SLAVE_SUBTREE = ZK_SEPERATOR + NIMBUS_SLAVE_ROOT;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkCreateModes.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkCreateModes.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkCreateModes.java
deleted file mode 100644
index 886146f..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkCreateModes.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-import java.util.HashMap;
-
-import org.apache.zookeeper.CreateMode;
-
-public class ZkCreateModes {
-
- private static HashMap<CreateMode, String> map;
-
- static {
- map = new HashMap<CreateMode, String>();
- map.put(CreateMode.EPHEMERAL, ":ephemeral");
- map.put(CreateMode.PERSISTENT, ":persistent");
- }
-
- public static String getStateName(CreateMode mode) {
- return map.get(mode);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkEventTypes.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkEventTypes.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkEventTypes.java
deleted file mode 100644
index cd2bb4e..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkEventTypes.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-import java.util.HashMap;
-
-import org.apache.zookeeper.Watcher;
-
-public class ZkEventTypes {
-
- private static HashMap<Watcher.Event.EventType, String> map;
-
- static {
- map = new HashMap<Watcher.Event.EventType, String>();
-
- map.put(Watcher.Event.EventType.None, ":none");
- map.put(Watcher.Event.EventType.NodeCreated, ":node-created");
- map.put(Watcher.Event.EventType.NodeDeleted, ":node-deleted");
- map.put(Watcher.Event.EventType.NodeDataChanged, ":node-data-changed");
- map.put(Watcher.Event.EventType.NodeChildrenChanged,
- ":node-children-changed");
-
- }
-
- public static String getStateName(Watcher.Event.EventType type) {
- return map.get(type);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkKeeperStates.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkKeeperStates.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkKeeperStates.java
deleted file mode 100644
index 9a9a979..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkKeeperStates.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-import java.util.HashMap;
-
-import org.apache.zookeeper.Watcher;
-
-public class ZkKeeperStates {
-
- private static HashMap<Watcher.Event.KeeperState, String> map;
-
- static {
- map = new HashMap<Watcher.Event.KeeperState, String>();
-
- map.put(Watcher.Event.KeeperState.AuthFailed, ":auth-failed");
- map.put(Watcher.Event.KeeperState.SyncConnected, ":connected");
- map.put(Watcher.Event.KeeperState.Disconnected, ":disconnected");
- map.put(Watcher.Event.KeeperState.Expired, ":expired");
- }
-
- public static String getStateName(Watcher.Event.KeeperState state) {
- return map.get(state);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkTool.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkTool.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkTool.java
deleted file mode 100644
index 2d504d9..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/ZkTool.java
+++ /dev/null
@@ -1,201 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.cluster.ClusterState;
-import com.alibaba.jstorm.cluster.DistributedClusterState;
-import com.google.common.collect.Maps;
-
-public class ZkTool {
- private static Logger LOG = Logger.getLogger(ZkTool.class);
-
- public static final String READ_CMD = "read";
-
- public static final String RM_CMD = "rm";
-
- public static void usage() {
- LOG.info("Read ZK node's data, please do as following:");
- LOG.info(ZkTool.class.getName() + " read zkpath");
-
- LOG.info("\nDelete topology backup assignment, please do as following:");
- LOG.info(ZkTool.class.getName() + " rm topologyname");
- }
-
- public static String getData(DistributedClusterState zkClusterState,
- String path) throws Exception {
- byte[] data = zkClusterState.get_data(path, false);
- if (data == null || data.length == 0) {
- return null;
- }
-
- Object obj = Utils.deserialize(data, null);
-
- return obj.toString();
- }
-
- public static void readData(String path) {
-
- DistributedClusterState zkClusterState = null;
-
- try {
- conf.put(Config.STORM_ZOOKEEPER_ROOT, "/");
-
- zkClusterState = new DistributedClusterState(conf);
-
- String data = getData(zkClusterState, path);
- if (data == null) {
- LOG.info("No data of " + path);
- }
-
- StringBuilder sb = new StringBuilder();
-
- sb.append("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n");
- sb.append("Zk node " + path + "\n");
- sb.append("Readable data:" + data + "\n");
- sb.append("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n\n");
-
- LOG.info(sb.toString());
-
- } catch (Exception e) {
- if (zkClusterState == null) {
- LOG.error("Failed to connect ZK ", e);
- } else {
- LOG.error("Failed to read data " + path + "\n", e);
- }
- } finally {
- if (zkClusterState != null) {
- zkClusterState.close();
- }
- }
- }
-
- public static void rmBakTopology(String topologyName) {
-
- DistributedClusterState zkClusterState = null;
-
- try {
-
- zkClusterState = new DistributedClusterState(conf);
-
- String path = ZkConstant.ASSIGNMENTS_BAK_SUBTREE;
- List<String> bakTopologys = zkClusterState
- .get_children(path, false);
-
- for (String tid : bakTopologys) {
- if (tid.equals(topologyName)) {
- LOG.info("Find backup " + topologyName);
-
- String topologyPath = assignment_bak_path(topologyName);
- zkClusterState.delete_node(topologyPath);
-
- LOG.info("Successfully delete topology " + topologyName
- + " backup Assignment");
-
- return;
- }
- }
-
- LOG.info("No backup topology " + topologyName + " Assignment");
-
- } catch (Exception e) {
- if (zkClusterState == null) {
- LOG.error("Failed to connect ZK ", e);
- } else {
- LOG.error("Failed to delete old topology " + topologyName
- + "\n", e);
- }
- } finally {
- if (zkClusterState != null) {
- zkClusterState.close();
- }
- }
-
- }
-
- private static Map conf;
-
- /**
- * @param args
- * @throws Exception
- */
- public static void main(String[] args) throws Exception {
- // TODO Auto-generated method stub
-
- if (args.length < 2) {
- LOG.info("Invalid parameter");
- usage();
- return;
- }
-
- conf = Utils.readStormConfig();
-
- if (args[0].equalsIgnoreCase(READ_CMD)) {
-
- readData(args[1]);
-
- } else if (args[0].equalsIgnoreCase(RM_CMD)) {
- rmBakTopology(args[1]);
- }
-
- }
-
- /*******************************************************************/
-
- public static String assignment_bak_path(String id) {
- return ZkConstant.ASSIGNMENTS_BAK_SUBTREE + ZkConstant.ZK_SEPERATOR
- + id;
- }
-
- @SuppressWarnings("rawtypes")
- public static ClusterState mk_distributed_cluster_state(Map _conf)
- throws Exception {
- return new DistributedClusterState(_conf);
- }
-
- public static Map<String, String> get_followers(ClusterState cluster_state)
- throws Exception {
- Map<String, String> ret = Maps.newHashMap();
- List<String> followers = cluster_state.get_children(
- ZkConstant.NIMBUS_SLAVE_SUBTREE, false);
- if (followers == null || followers.size() == 0) {
- return ret;
- }
- for (String follower : followers) {
- if (follower != null) {
- String uptime = new String(cluster_state.get_data(
- ZkConstant.NIMBUS_SLAVE_SUBTREE + ZkConstant.ZK_SEPERATOR
- + follower, false));
- ret.put(follower, uptime);
- }
- }
- return ret;
- }
-
- // public static List<String> get_follower_hosts(ClusterState cluster_state)
- // throws Exception {
- // List<String> followers = cluster_state.get_children(
- // ZkConstant.NIMBUS_SLAVE_SUBTREE, false);
- // if (followers == null || followers.size() == 0) {
- // return Lists.newArrayList();
- // }
- // return followers;
- // }
- //
- // public static List<String> get_follower_hbs(ClusterState cluster_state)
- // throws Exception {
- // List<String> ret = Lists.newArrayList();
- // List<String> followers = get_follower_hosts(cluster_state);
- // for (String follower : followers) {
- // ret.add(new String(cluster_state.get_data(ZkConstant.NIMBUS_SLAVE_SUBTREE
- // + ZkConstant.ZK_SEPERATOR + follower, false)));
- // }
- // return ret;
- // }
-
-}
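
The usage() text above documents ZkTool's two sub-commands. A minimal sketch of driving them programmatically; the package import, the ZK path and the topology name are placeholders, not values taken from this change:

    import com.alibaba.jstorm.zk.ZkTool;   // assumed package of the class above

    public class ZkToolExample {
        public static void main(String[] args) throws Exception {
            // Dump the data stored under a ZK node (path is a placeholder).
            ZkTool.main(new String[] { "read", "/assignments" });

            // Remove the backup assignment of a topology (name is a placeholder).
            ZkTool.main(new String[] { "rm", "demo-topology" });
        }
    }
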
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Zookeeper.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Zookeeper.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Zookeeper.java
deleted file mode 100644
index 5bf0b16..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/zk/Zookeeper.java
+++ /dev/null
@@ -1,217 +0,0 @@
-package com.alibaba.jstorm.zk;
-
-import java.io.File;
-import java.io.IOException;
-import java.net.InetSocketAddress;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.api.CuratorEvent;
-import org.apache.curator.framework.api.CuratorEventType;
-import org.apache.curator.framework.api.CuratorListener;
-import org.apache.curator.framework.api.UnhandledErrorListener;
-import org.apache.log4j.Logger;
-import org.apache.zookeeper.KeeperException;
-import org.apache.zookeeper.WatchedEvent;
-import org.apache.zookeeper.ZooDefs;
-import org.apache.zookeeper.data.Stat;
-import org.apache.zookeeper.server.ZooKeeperServer;
-
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.callback.DefaultWatcherCallBack;
-import com.alibaba.jstorm.callback.WatcherCallBack;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.jstorm.utils.PathUtils;
-
-/**
- * ZK simple wrapper
- *
- * @author yannian
- *
- */
-public class Zookeeper {
-
- private static Logger LOG = Logger.getLogger(Zookeeper.class);
-
- public CuratorFramework mkClient(Map conf, List<String> servers,
- Object port, String root) {
- return mkClient(conf, servers, port, root, new DefaultWatcherCallBack());
- }
-
- /**
- * Connect to ZK and register the watched-event and unhandled-error callbacks.
- *
- * @return
- */
- public CuratorFramework mkClient(Map conf, List<String> servers,
- Object port, String root, final WatcherCallBack watcher) {
-
- CuratorFramework fk = Utils.newCurator(conf, servers, port, root);
-
- fk.getCuratorListenable().addListener(new CuratorListener() {
- @Override
- public void eventReceived(CuratorFramework _fk, CuratorEvent e)
- throws Exception {
- if (e.getType().equals(CuratorEventType.WATCHED)) {
- WatchedEvent event = e.getWatchedEvent();
-
- watcher.execute(event.getState(), event.getType(),
- event.getPath());
- }
-
- }
- });
-
- fk.getUnhandledErrorListenable().addListener(
- new UnhandledErrorListener() {
- @Override
- public void unhandledError(String msg, Throwable error) {
- String errmsg = "Unrecoverable Zookeeper error, halting process: "
- + msg;
- LOG.error(errmsg, error);
- JStormUtils.halt_process(1,
- "Unrecoverable Zookeeper error");
-
- }
- });
- fk.start();
- return fk;
- }
-
- public String createNode(CuratorFramework zk, String path, byte[] data,
- org.apache.zookeeper.CreateMode mode) throws Exception {
-
- String npath = PathUtils.normalize_path(path);
-
- return zk.create().withMode(mode).withACL(ZooDefs.Ids.OPEN_ACL_UNSAFE)
- .forPath(npath, data);
- }
-
- public String createNode(CuratorFramework zk, String path, byte[] data)
- throws Exception {
- return createNode(zk, path, data,
- org.apache.zookeeper.CreateMode.PERSISTENT);
- }
-
- public boolean existsNode(CuratorFramework zk, String path, boolean watch)
- throws Exception {
- Stat stat = null;
- if (watch) {
- stat = zk.checkExists().watched()
- .forPath(PathUtils.normalize_path(path));
- } else {
- stat = zk.checkExists().forPath(PathUtils.normalize_path(path));
- }
- return stat != null;
- }
-
- public void deleteNode(CuratorFramework zk, String path) throws Exception {
- zk.delete().forPath(PathUtils.normalize_path(path));
- }
-
- public void mkdirs(CuratorFramework zk, String path) throws Exception {
-
- String npath = PathUtils.normalize_path(path);
-
- // the node is "/"
- if (npath.equals("/")) {
- return;
- }
-
- // the node exist
- if (existsNode(zk, npath, false)) {
- return;
- }
-
- mkdirs(zk, PathUtils.parent_path(npath));
- try {
- createNode(zk, npath, JStormUtils.barr((byte) 7),
- org.apache.zookeeper.CreateMode.PERSISTENT);
- } catch (KeeperException e) {
- // this can happen when multiple clients do mkdirs at the same time
- LOG.warn("zookeeper mkdirs for path " + path, e);
-
- }
-
- }
-
- public byte[] getData(CuratorFramework zk, String path, boolean watch)
- throws Exception {
- String npath = PathUtils.normalize_path(path);
- try {
- if (existsNode(zk, npath, watch)) {
- if (watch) {
- return zk.getData().watched().forPath(npath);
- } else {
- return zk.getData().forPath(npath);
- }
- }
- } catch (KeeperException e) {
- LOG.error("zookeeper getdata for path" + path, e);
- }
-
- return null;
- }
-
- public List<String> getChildren(CuratorFramework zk, String path,
- boolean watch) throws Exception {
-
- String npath = PathUtils.normalize_path(path);
-
- if (watch) {
- return zk.getChildren().watched().forPath(npath);
- } else {
- return zk.getChildren().forPath(npath);
- }
- }
-
- public Stat setData(CuratorFramework zk, String path, byte[] data)
- throws Exception {
- String npath = PathUtils.normalize_path(path);
- return zk.setData().forPath(npath, data);
- }
-
- public boolean exists(CuratorFramework zk, String path, boolean watch)
- throws Exception {
- return existsNode(zk, path, watch);
- }
-
- public void deletereRcursive(CuratorFramework zk, String path)
- throws Exception {
-
- String npath = PathUtils.normalize_path(path);
-
- if (existsNode(zk, npath, false)) {
-
- List<String> childs = getChildren(zk, npath, false);
-
- for (String child : childs) {
-
- String childFullPath = PathUtils.full_path(npath, child);
-
- deletereRcursive(zk, childFullPath);
- }
-
- deleteNode(zk, npath);
- }
- }
-
- public static Factory mkInprocessZookeeper(String localdir, int port)
- throws IOException, InterruptedException {
- LOG.info("Starting inprocess zookeeper at port " + port + " and dir "
- + localdir);
- File localfile = new File(localdir);
- ZooKeeperServer zk = new ZooKeeperServer(localfile, localfile, 2000);
- Factory factory = new Factory(new InetSocketAddress(port), 0);
- factory.startup(zk);
- return factory;
- }
-
- public void shutdownInprocessZookeeper(Factory handle) {
- handle.shutdown();
- }
-
-}
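
mkClient() above wires a Curator client with watched-event and unhandled-error listeners, and the node helpers normalize paths before every call. A minimal sketch of connecting and creating a node with this wrapper; the server list, port, root and node paths are placeholders, and the conf map is assumed to come from Utils.readStormConfig():

    import java.util.Arrays;
    import java.util.Map;

    import org.apache.curator.framework.CuratorFramework;

    import backtype.storm.utils.Utils;
    import com.alibaba.jstorm.zk.Zookeeper;

    public class ZookeeperWrapperExample {
        public static void main(String[] args) throws Exception {
            Map conf = Utils.readStormConfig();          // storm.yaml plus defaults
            Zookeeper zkobj = new Zookeeper();

            // Placeholder connection settings; the default watcher just logs events.
            CuratorFramework zk = zkobj.mkClient(conf,
                    Arrays.asList("localhost"), 2181, "/jstorm-demo");

            zkobj.mkdirs(zk, "/example");                               // recursive create
            zkobj.createNode(zk, "/example/node", "hello".getBytes());  // PERSISTENT by default
            System.out.println(new String(zkobj.getData(zk, "/example/node", false)));

            zk.close();
        }
    }
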
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-client/pom.xml b/jstorm-client/pom.xml
deleted file mode 100644
index 6a0f465..0000000
--- a/jstorm-client/pom.xml
+++ /dev/null
@@ -1,194 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-
- <parent>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-all</artifactId>
- <version>0.9.6.3</version>
- <relativePath>..</relativePath>
- </parent>
- <!--<parent>
- <groupId>com.taobao</groupId>
- <artifactId>parent</artifactId>
- <version>1.0.2</version>
- </parent>-->
- <modelVersion>4.0.0</modelVersion>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>0.9.6.3</version>
- <packaging>jar</packaging>
- <name>${project.artifactId}-${project.version}</name>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>2.3.2</version>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-source-plugin</artifactId>
- <executions>
- <execution>
- <id>attach-sources</id>
- <goals>
- <goal>jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- <properties>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <powermock.version>1.4.11</powermock.version>
-
- </properties>
- <dependencies>
- <dependency>
- <groupId>org.clojure</groupId>
- <artifactId>clojure</artifactId>
- <version>1.5.1</version>
- </dependency>
- <dependency>
- <groupId>commons-io</groupId>
- <artifactId>commons-io</artifactId>
- <version>2.4</version>
- </dependency>
- <dependency>
- <groupId>org.apache.commons</groupId>
- <artifactId>commons-exec</artifactId>
- <version>1.1</version>
- </dependency>
- <dependency>
- <groupId>org.apache.httpcomponents</groupId>
- <artifactId>httpclient</artifactId>
- <version>4.3.2</version>
- </dependency>
- <dependency>
- <groupId>storm</groupId>
- <artifactId>libthrift7</artifactId>
- <version>0.7.0</version>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>clj-time</groupId>
- <artifactId>clj-time</artifactId>
- <version>0.4.1</version>
- </dependency>
- <dependency>
- <groupId>org.apache.curator</groupId>
- <artifactId>curator-framework</artifactId>
- <version>2.5.0</version>
- <exclusions>
- <exclusion>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
- <dependency>
- <groupId>backtype</groupId>
- <artifactId>jzmq</artifactId>
- <version>2.1.0</version>
- </dependency>
- <dependency>
- <groupId>com.esotericsoftware.kryo</groupId>
- <artifactId>kryo</artifactId>
- <version>2.17</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>fastjson</artifactId>
- <version>1.1.41</version>
- </dependency>
- <!-- keep compatible with storm, some old project use gson -->
- <dependency>
- <groupId>com.googlecode.json-simple</groupId>
- <artifactId>json-simple</artifactId>
- <version>1.1</version>
- </dependency>
- <dependency>
- <groupId>storm</groupId>
- <artifactId>carbonite</artifactId>
- <version>1.5.0</version>
- </dependency>
-
- <dependency>
- <groupId>org.yaml</groupId>
- <artifactId>snakeyaml</artifactId>
- <version>1.11</version>
- </dependency>
- <dependency>
- <groupId>com.lmax</groupId>
- <artifactId>disruptor</artifactId>
- <version>3.2.1</version>
- </dependency>
- <dependency>
- <groupId>io.netty</groupId>
- <artifactId>netty</artifactId>
- <version>3.9.0.Final</version>
- </dependency>
- <dependency>
- <groupId>storm</groupId>
- <artifactId>jgrapht</artifactId>
- <version>0.8.3</version>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.10</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>org.powermock</groupId>
- <artifactId>powermock-module-junit4</artifactId>
- <version>${powermock.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.16</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>1.7.5</version>
- </dependency>
- <dependency>
- <groupId>commons-cli</groupId>
- <artifactId>commons-cli</artifactId>
- <version>1.2</version>
- </dependency>
- <dependency>
- <groupId>org.codehaus.plexus</groupId>
- <artifactId>plexus-compiler-javac</artifactId>
- <version>1.8.1</version>
- </dependency>
- <dependency>
- <groupId>com.google.code.gson</groupId>
- <artifactId>gson</artifactId>
- <version>2.3.1</version>
- </dependency>
-
- <!-- <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId>
- <version>1.0.13</version> </dependency> <dependency> <groupId>org.slf4j</groupId>
- <artifactId>log4j-over-slf4j</artifactId> <version>1.7.5</version> </dependency> -->
-
- </dependencies>
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IFailValueSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IFailValueSpout.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IFailValueSpout.java
deleted file mode 100644
index 913709d..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IFailValueSpout.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.alibaba.jstorm.client.spout;
-
-import java.util.List;
-
-/**
- * This interface exposes the emitted values when a tuple fails.
- *
- * If a spout implements this interface, the framework calls
- * fail(msgId, values) instead of ISpout.fail() when a tuple fails.
- *
- * @author longda
- */
-public interface IFailValueSpout {
- void fail(Object msgId, List<Object> values);
-}
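
The javadoc above means that a spout implementing IFailValueSpout gets the failed tuple's values handed back through fail(msgId, values) rather than the plain ISpout.fail() callback. A minimal sketch of a spout that re-queues failed values for replay; the retry queue, the output field name and the use of BaseRichSpout are illustrative assumptions:

    import java.util.List;
    import java.util.Map;
    import java.util.Queue;
    import java.util.concurrent.ConcurrentLinkedQueue;

    import backtype.storm.spout.SpoutOutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichSpout;
    import backtype.storm.tuple.Fields;

    import com.alibaba.jstorm.client.spout.IFailValueSpout;

    public class ReplayingSpout extends BaseRichSpout implements IFailValueSpout {

        private SpoutOutputCollector collector;
        private final Queue<List<Object>> retryQueue = new ConcurrentLinkedQueue<List<Object>>();

        @Override
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void nextTuple() {
            List<Object> failed = retryQueue.poll();
            if (failed != null) {
                collector.emit(failed);     // replay a previously failed tuple first
            }
            // ... otherwise emit fresh data from the source ...
        }

        @Override
        public void fail(Object msgId, List<Object> values) {
            // Called instead of ISpout.fail(); the originally emitted values come back here.
            retryQueue.offer(values);
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("line"));   // placeholder field name
        }
    }
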
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/ClusterState.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/ClusterState.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/ClusterState.java
deleted file mode 100644
index de64e2f..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/ClusterState.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package com.alibaba.jstorm.cluster;
-
-import java.util.List;
-import java.util.UUID;
-
-import com.alibaba.jstorm.callback.ClusterStateCallback;
-
-
-/**
- * All ZK interface
- *
- * @author yannian
- *
- */
-public interface ClusterState {
- public void set_ephemeral_node(String path, byte[] data) throws Exception;
-
- public void delete_node(String path) throws Exception;
-
- public void set_data(String path, byte[] data) throws Exception;
-
- public byte[] get_data(String path, boolean watch) throws Exception;
-
- public List<String> get_children(String path, boolean watch)
- throws Exception;
-
- public void mkdirs(String path) throws Exception;
-
- public void tryToBeLeader(String path, byte[] host) throws Exception;
-
- public void close();
-
- public UUID register(ClusterStateCallback callback);
-
- public ClusterStateCallback unregister(UUID id);
-
- public boolean node_existed(String path, boolean watch) throws Exception;
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/DistributedClusterState.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/DistributedClusterState.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/DistributedClusterState.java
deleted file mode 100644
index 95224f0..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/cluster/DistributedClusterState.java
+++ /dev/null
@@ -1,175 +0,0 @@
-package com.alibaba.jstorm.cluster;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.log4j.Logger;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.Watcher.Event.EventType;
-import org.apache.zookeeper.Watcher.Event.KeeperState;
-
-import backtype.storm.Config;
-
-import com.alibaba.jstorm.callback.ClusterStateCallback;
-import com.alibaba.jstorm.callback.WatcherCallBack;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.jstorm.utils.PathUtils;
-import com.alibaba.jstorm.zk.Zookeeper;
-
-/**
- * All ZK interface implementation
- *
- * @author yannian.mu
- *
- */
-public class DistributedClusterState implements ClusterState {
-
- private static Logger LOG = Logger.getLogger(DistributedClusterState.class);
-
- private Zookeeper zkobj = new Zookeeper();
- private CuratorFramework zk;
- private WatcherCallBack watcher;
-
- /**
- * Why run all callbacks when a single event is received?
- */
- private ConcurrentHashMap<UUID, ClusterStateCallback> callbacks = new ConcurrentHashMap<UUID, ClusterStateCallback>();
-
- private Map<Object, Object> conf;
- private AtomicBoolean active;
-
- public DistributedClusterState(Map<Object, Object> _conf) throws Exception {
- conf = _conf;
-
- // just mkdir STORM_ZOOKEEPER_ROOT dir
- CuratorFramework _zk = mkZk();
- String path = String.valueOf(conf.get(Config.STORM_ZOOKEEPER_ROOT));
- zkobj.mkdirs(_zk, path);
- _zk.close();
-
- active = new AtomicBoolean(true);
-
- watcher = new WatcherCallBack() {
- @Override
- public void execute(KeeperState state, EventType type, String path) {
- if (active.get()) {
- if (!(state.equals(KeeperState.SyncConnected))) {
- LOG.warn("Received event " + state + ":" + type + ":"
- + path + " with disconnected Zookeeper.");
- } else {
- LOG.info("Received event " + state + ":" + type + ":"
- + path);
- }
-
- if (!type.equals(EventType.None)) {
- for (Entry<UUID, ClusterStateCallback> e : callbacks
- .entrySet()) {
- ClusterStateCallback fn = e.getValue();
- fn.execute(type, path);
- }
- }
- }
- }
- };
- zk = null;
- zk = mkZk(watcher);
-
- }
-
- @SuppressWarnings("unchecked")
- private CuratorFramework mkZk() throws IOException {
- return zkobj.mkClient(conf,
- (List<String>) conf.get(Config.STORM_ZOOKEEPER_SERVERS),
- conf.get(Config.STORM_ZOOKEEPER_PORT), "");
- }
-
- @SuppressWarnings("unchecked")
- private CuratorFramework mkZk(WatcherCallBack watcher)
- throws NumberFormatException, IOException {
- return zkobj.mkClient(conf,
- (List<String>) conf.get(Config.STORM_ZOOKEEPER_SERVERS),
- conf.get(Config.STORM_ZOOKEEPER_PORT),
- String.valueOf(conf.get(Config.STORM_ZOOKEEPER_ROOT)), watcher);
- }
-
- @Override
- public void close() {
- this.active.set(false);
- zk.close();
- }
-
- @Override
- public void delete_node(String path) throws Exception {
- zkobj.deletereRcursive(zk, path);
- }
-
- @Override
- public List<String> get_children(String path, boolean watch)
- throws Exception {
- return zkobj.getChildren(zk, path, watch);
- }
-
- @Override
- public byte[] get_data(String path, boolean watch) throws Exception {
- return zkobj.getData(zk, path, watch);
- }
-
- @Override
- public void mkdirs(String path) throws Exception {
- zkobj.mkdirs(zk, path);
-
- }
-
- @Override
- public void set_data(String path, byte[] data) throws Exception {
- if (data.length > (JStormUtils.SIZE_1_K * 800))
- throw new Exception("Writing 800k+ data into ZK is not allowed!");
- if (zkobj.exists(zk, path, false)) {
- zkobj.setData(zk, path, data);
- } else {
- zkobj.mkdirs(zk, PathUtils.parent_path(path));
- zkobj.createNode(zk, path, data, CreateMode.PERSISTENT);
- }
-
- }
-
- @Override
- public void set_ephemeral_node(String path, byte[] data) throws Exception {
- zkobj.mkdirs(zk, PathUtils.parent_path(path));
- if (zkobj.exists(zk, path, false)) {
- zkobj.setData(zk, path, data);
- } else {
- zkobj.createNode(zk, path, data, CreateMode.EPHEMERAL);
- }
- }
-
- @Override
- public UUID register(ClusterStateCallback callback) {
- UUID id = UUID.randomUUID();
- this.callbacks.put(id, callback);
- return id;
- }
-
- @Override
- public ClusterStateCallback unregister(UUID id) {
- return this.callbacks.remove(id);
- }
-
- @Override
- public boolean node_existed(String path, boolean watch) throws Exception {
- // TODO Auto-generated method stub
- return zkobj.existsNode(zk, path, watch);
- }
-
- @Override
- public void tryToBeLeader(String path, byte[] host) throws Exception {
- // TODO Auto-generated method stub
- zkobj.createNode(zk, path, host, CreateMode.EPHEMERAL);
- }
-}
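
set_data() above creates the node (and any missing parents) on first write and rejects payloads over roughly 800 KB. A minimal sketch of exercising the implementation directly; the node path and payload are placeholders, and the conf map is assumed to carry the usual storm.zookeeper.* settings:

    import java.util.Map;

    import backtype.storm.utils.Utils;

    import com.alibaba.jstorm.cluster.ClusterState;
    import com.alibaba.jstorm.cluster.DistributedClusterState;

    public class ClusterStateExample {
        public static void main(String[] args) throws Exception {
            Map conf = Utils.readStormConfig();             // storm.zookeeper.* settings
            ClusterState state = new DistributedClusterState(conf);
            try {
                state.set_data("/example/config", "v1".getBytes());   // create-or-update
                byte[] raw = state.get_data("/example/config", false);
                System.out.println(new String(raw));
            } finally {
                state.close();
            }
        }
    }
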
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StatBuckets.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StatBuckets.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StatBuckets.java
deleted file mode 100644
index 26701fd..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StatBuckets.java
+++ /dev/null
@@ -1,124 +0,0 @@
-package com.alibaba.jstorm.common.stats;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class StatBuckets {
-
- public static final Integer NUM_STAT_BUCKETS = 20;
-
- public static final Integer MINUTE_WINDOW = 600;
- public static final Integer HOUR_WINDOW = 10800;
- public static final Integer DAY_WINDOW = 86400;
-
- public static final String MINUTE_WINDOW_STR = "0d0h10m0s";
- public static final String HOUR_WINDOW_STR = "0d3h0m0s";
- public static final String DAY_WINDOW_STR = "1d0h0m0s";
- public static final String ALL_WINDOW_STR = "All-time";
-
- public static Integer[] STAT_BUCKETS = { MINUTE_WINDOW / NUM_STAT_BUCKETS,
- HOUR_WINDOW / NUM_STAT_BUCKETS, DAY_WINDOW / NUM_STAT_BUCKETS };
-
- private static final String[][] PRETTYSECDIVIDERS = {
- new String[] { "s", "60" }, new String[] { "m", "60" },
- new String[] { "h", "24" }, new String[] { "d", null } };
-
- /**
- * Convert a stat-window key into its display string (0 means "All-time").
- *
- * @param key
- * @return
- */
- public static String parseTimeKey(Integer key) {
- if (key == 0) {
- return ALL_WINDOW_STR;
- } else {
- return String.valueOf(key);
- }
- }
-
- /**
- *
- * Default is the latest result
- *
- * @param showKey
- * @return
- */
- public static String getTimeKey(String showKey) {
- String window = null;
- if (showKey == null) {
- window = String.valueOf(MINUTE_WINDOW);
- } else if (showKey.equals(MINUTE_WINDOW_STR)) {
- window = String.valueOf(MINUTE_WINDOW);
- } else if (showKey.equals(HOUR_WINDOW_STR)) {
- window = String.valueOf(HOUR_WINDOW);
- } else if (showKey.equals(DAY_WINDOW_STR)) {
- window = String.valueOf(DAY_WINDOW);
- } else if (showKey.equals(ALL_WINDOW_STR)) {
- window = ALL_WINDOW_STR;
- } else {
- window = String.valueOf(MINUTE_WINDOW);
- }
-
- return window;
- }
-
- /**
- * Default is the latest result
- *
- * @param showStr
- * @return
- */
- public static String getShowTimeStr(String showStr) {
- if (showStr == null) {
- return MINUTE_WINDOW_STR;
- } else if (showStr.equals(MINUTE_WINDOW_STR)
- || showStr.equals(HOUR_WINDOW_STR)
- || showStr.equals(DAY_WINDOW_STR)
- || showStr.equals(ALL_WINDOW_STR)) {
- return showStr;
-
- } else {
- return MINUTE_WINDOW_STR;
- }
-
- }
-
- /**
- * seconds to string like 1d20h30m40s
- *
- * @param secs
- * @return
- */
- public static String prettyUptimeStr(int secs) {
- int diversize = PRETTYSECDIVIDERS.length;
-
- List<String> tmp = new ArrayList<String>();
- int div = secs;
- for (int i = 0; i < diversize; i++) {
- if (PRETTYSECDIVIDERS[i][1] != null) {
- Integer d = Integer.parseInt(PRETTYSECDIVIDERS[i][1]);
- tmp.add(div % d + PRETTYSECDIVIDERS[i][0]);
- div = div / d;
- } else {
- tmp.add(div + PRETTYSECDIVIDERS[i][0]);
- }
- }
-
- String rtn = "";
- int tmpSize = tmp.size();
- for (int j = tmpSize - 1; j > -1; j--) {
- rtn += tmp.get(j);
- }
- return rtn;
- }
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
-
- }
-
-}
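
prettyUptimeStr() walks the divider table above from seconds up to days, and getTimeKey() falls back to the 10-minute window for unknown keys. Two quick checks, assuming the class is on the classpath:

    import com.alibaba.jstorm.common.stats.StatBuckets;

    public class StatBucketsExample {
        public static void main(String[] args) {
            // 93784 s = 1 day + 2 h + 3 m + 4 s
            System.out.println(StatBuckets.prettyUptimeStr(93784));  // 1d2h3m4s
            System.out.println(StatBuckets.getTimeKey(null));        // 600 (10-minute window)
        }
    }
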
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StaticsType.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StaticsType.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StaticsType.java
deleted file mode 100644
index e9b76b3..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/common/stats/StaticsType.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.alibaba.jstorm.common.stats;
-
-public enum StaticsType {
- emitted, send_tps, recv_tps, acked, failed, transferred, process_latencies;
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormHistogram.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormHistogram.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormHistogram.java
deleted file mode 100644
index 863deaa..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormHistogram.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-import com.codahale.metrics.Histogram;
-
-public class JStormHistogram {
- private static boolean isEnable = true;
-
- public static boolean isEnable() {
- return isEnable;
- }
-
- public static void setEnable(boolean isEnable) {
- JStormHistogram.isEnable = isEnable;
- }
-
- private Histogram instance;
- private String name;
-
- public JStormHistogram(String name, Histogram instance) {
- this.name = name;
- this.instance = instance;
- }
-
- public void update(int value) {
- if (isEnable == true) {
- instance.update(value);
- }
- }
-
- public void update(long value) {
- if (isEnable == true) {
- instance.update(value);
- }
- }
-
- public Histogram getInstance() {
- return instance;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormTimer.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormTimer.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormTimer.java
deleted file mode 100644
index dac94db..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/JStormTimer.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-
-import java.util.concurrent.atomic.AtomicReference;
-
-import org.apache.log4j.Logger;
-
-import com.codahale.metrics.Timer;
-
-public class JStormTimer {
- private static final Logger LOG = Logger.getLogger(JStormTimer.class);
- private static boolean isEnable = true;
-
- public static boolean isEnable() {
- return isEnable;
- }
-
- public static void setEnable(boolean isEnable) {
- JStormTimer.isEnable = isEnable;
- }
-
-
- private Timer instance;
- private String name;
- public JStormTimer(String name, Timer instance) {
- this.name = name;
- this.instance = instance;
- this.timerContext = new AtomicReference<Timer.Context>();
- }
-
- /**
- * This logic isn't perfect: it misses measurements when start() is
- * called concurrently. But it performs better than creating a new
- * wrapper instance around Timer.Context for every call.
- */
- private AtomicReference<Timer.Context> timerContext = null;
- public void start() {
- if (JStormTimer.isEnable == false) {
- return ;
- }
-
- if (timerContext.compareAndSet(null, instance.time()) == false) {
- LOG.warn("Already start timer " + name);
- return ;
- }
-
- }
-
- public void stop() {
- Timer.Context context = timerContext.getAndSet(null);
- if (context != null) {
- context.stop();
- }
- }
-
- public Timer getInstance() {
- return instance;
- }
-
-
-}
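
The AtomicReference above makes start() a no-op (with a warning) when a measurement is already in flight, and stop() only records if a context was actually started. A minimal sketch of timing one piece of work; the timer name is arbitrary, and the Timer instance would normally come from the Metrics helper later in this change:

    import com.codahale.metrics.Timer;

    import com.alibaba.jstorm.metric.JStormTimer;

    public class TimerExample {
        public static void main(String[] args) throws Exception {
            JStormTimer emitTimer = new JStormTimer("emit-time", new Timer());

            emitTimer.start();                 // warns and returns if already started
            try {
                Thread.sleep(5);               // stand-in for the work being measured
            } finally {
                emitTimer.stop();              // records the elapsed time
            }

            System.out.println(emitTimer.getInstance().getCount());   // 1
        }
    }
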
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricDef.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricDef.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricDef.java
deleted file mode 100644
index 882057e..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricDef.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-public class MetricDef {
- // metric name for task
- public static final String DESERIALIZE_QUEUE = "Deserialize_Queue";
- public static final String DESERIALIZE_TIME = "Deserialize_Time";
- public static final String SERIALIZE_QUEUE = "Serialize_Queue";
- public static final String SERIALIZE_TIME = "Serialize_Time";
- public static final String EXECUTE_QUEUE = "Executor_Queue";
- public static final String EXECUTE_TIME = "Execute_Time";
- public static final String ACKER_TIME = "Acker_Time";
- public static final String EMPTY_CPU_RATIO = "Empty_Cpu_Ratio";
- public static final String PENDING_MAP = "Pending_Num";
- public static final String EMIT_TIME = "Emit_Time";
-
- // metric name for worker
- public static final String NETWORK_MSG_TRANS_TIME = "Network_Transmit_Time";
- public static final String NETTY_SERV_DECODE_TIME = "Netty_Server_Decode_Time";
- public static final String DISPATCH_TIME = "Virtual_Port_Dispatch_Time";
- public static final String DISPATCH_QUEUE = "Virtual_Port_Dispatch_Queue";
- public static final String BATCH_TUPLE_TIME = "Batch_Tuple_Time";
- public static final String BATCH_TUPLE_QUEUE = "Batch_Tuple_Queue";
- public static final String DRAINER_TIME = "Drainer_Time";
- public static final String DRAINER_QUEUE = "Drainer_Queue";
- public static final String NETTY_CLI_SEND_TIME = "Netty_Client_Send_Time";
- public static final String NETTY_CLI_BATCH_SIZE = "Netty_Client_Send_Batch_Size";
- public static final String NETTY_CLI_SEND_PENDING = "Netty_Client_Send_Pendings";
- public static final String NETTY_CLI_SYNC_BATCH_QUEUE = "Netty_Client_Sync_BatchQueue";
- public static final String NETTY_CLI_SYNC_DISR_QUEUE = "Netty_Client_Sync_DisrQueue";
-
- public static final String ZMQ_SEND_TIME = "ZMQ_Send_Time";
- public static final String ZMQ_SEND_MSG_SIZE = "ZMQ_Send_MSG_Size";
-
- public static final String CPU_USED_RATIO = "Used_Cpu";
- public static final String MEMORY_USED = "Used_Memory";
-
- public static final String REMOTE_CLI_ADDR = "Remote_Client_Address";
- public static final String REMOTE_SERV_ADDR = "Remote_Server_Address";
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricInfo.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricInfo.java
deleted file mode 100644
index 09a2a10..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricInfo.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-import com.codahale.metrics.Metric;
-
-public class MetricInfo {
- private Metric metric;
- private String prefix;
- private String name;
-
- public MetricInfo(String prefix, String name, Metric metric) {
- this.prefix = prefix;
- this.name = name;
- this.metric = metric;
- }
-
- public String getPrefix() {
- return prefix;
- }
-
- public String getName() {
- return name;
- }
-
- public Metric getMetric() {
- return metric;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricJstack.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricJstack.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricJstack.java
deleted file mode 100644
index c60525a..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/MetricJstack.java
+++ /dev/null
@@ -1,123 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-import java.lang.management.ManagementFactory;
-import java.lang.management.ThreadInfo;
-import java.lang.management.ThreadMXBean;
-
-import com.codahale.metrics.Gauge;
-
-public class MetricJstack implements Gauge<String> {
-
- private String getTaskName(long id, String name) {
- if (name == null) {
- return Long.toString(id);
- }
- return id + " (" + name + ")";
- }
-
- public String dumpThread() throws Exception {
- StringBuilder writer = new StringBuilder();
-
- ThreadMXBean threadMXBean = ManagementFactory.getThreadMXBean();
-
- boolean contention = threadMXBean.isThreadContentionMonitoringEnabled();
-
- long[] threadIds = threadMXBean.getAllThreadIds();
- writer.append(threadIds.length + " active threads:");
- for (long tid : threadIds) {
- writer.append(tid).append(" ");
- }
- writer.append("\n");
-
- long[] deadLockTids = threadMXBean.findDeadlockedThreads();
- if (deadLockTids != null) {
- writer.append(threadIds.length + " deadlocked threads:");
- for (long tid : deadLockTids) {
- writer.append(tid).append(" ");
- }
- writer.append("\n");
- }
-
- long[] deadLockMonitorTids = threadMXBean
- .findMonitorDeadlockedThreads();
- if (deadLockMonitorTids != null) {
- writer.append(threadIds.length + " deadlocked monitor threads:");
- for (long tid : deadLockMonitorTids) {
- writer.append(tid).append(" ");
- }
- writer.append("\n");
- }
-
- for (long tid : threadIds) {
- ThreadInfo info = threadMXBean
- .getThreadInfo(tid, Integer.MAX_VALUE);
- if (info == null) {
- writer.append(" Inactive").append("\n");
- continue;
- }
- writer.append(
- "Thread "
- + getTaskName(info.getThreadId(),
- info.getThreadName()) + ":").append("\n");
- Thread.State state = info.getThreadState();
- writer.append(" State: " + state).append("\n");
- writer.append(" Blocked count: " + info.getBlockedCount()).append(
- "\n");
- writer.append(" Waited count: " + info.getWaitedCount()).append(
- "\n");
- writer.append(" Cpu time:")
- .append(threadMXBean.getThreadCpuTime(tid) / 1000000)
- .append("ms").append("\n");
- writer.append(" User time:")
- .append(threadMXBean.getThreadUserTime(tid) / 1000000)
- .append("ms").append("\n");
- if (contention) {
- writer.append(" Blocked time: " + info.getBlockedTime())
- .append("\n");
- writer.append(" Waited time: " + info.getWaitedTime()).append(
- "\n");
- }
- if (state == Thread.State.WAITING) {
- writer.append(" Waiting on " + info.getLockName())
- .append("\n");
- } else if (state == Thread.State.BLOCKED) {
- writer.append(" Blocked on " + info.getLockName())
- .append("\n");
- writer.append(
- " Blocked by "
- + getTaskName(info.getLockOwnerId(),
- info.getLockOwnerName())).append("\n");
- }
-
- }
- for (long tid : threadIds) {
- ThreadInfo info = threadMXBean
- .getThreadInfo(tid, Integer.MAX_VALUE);
- if (info == null) {
- writer.append(" Inactive").append("\n");
- continue;
- }
-
- writer.append(
- "Thread "
- + getTaskName(info.getThreadId(),
- info.getThreadName()) + ": Stack").append(
- "\n");
- for (StackTraceElement frame : info.getStackTrace()) {
- writer.append(" " + frame.toString()).append("\n");
- }
- }
-
- return writer.toString();
- }
-
- @Override
- public String getValue() {
- try {
- return dumpThread();
- } catch (Exception e) {
- return "Failed to get jstack thread info";
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/Metrics.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/Metrics.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/Metrics.java
deleted file mode 100644
index 3e50c0a..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/Metrics.java
+++ /dev/null
@@ -1,330 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.utils.DisruptorQueue;
-
-import com.alibaba.jstorm.client.metric.MetricCallback;
-//import com.alibaba.jstorm.daemon.worker.Worker;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.Meter;
-import com.codahale.metrics.Metric;
-import com.codahale.metrics.MetricRegistry;
-import com.codahale.metrics.MetricSet;
-import com.codahale.metrics.Snapshot;
-import com.codahale.metrics.Timer;
-import com.codahale.metrics.jvm.GarbageCollectorMetricSet;
-import com.codahale.metrics.jvm.MemoryUsageGaugeSet;
-import com.codahale.metrics.jvm.ThreadStatesGaugeSet;
-
-public class Metrics {
-
- public enum MetricType {
- TASK, WORKER
- }
-
- private static final Logger LOG = Logger.getLogger(Metrics.class);
- //private static final Logger DEFAULT_LOG = Logger.getLogger(Worker.class);
-
- private static final MetricRegistry metrics = new MetricRegistry();
-
- private static final MetricRegistry jstack = new MetricRegistry();
-
- private static Map<String, List<MetricInfo>> taskMetricMap = new ConcurrentHashMap<String, List<MetricInfo>>();
- private static List<MetricInfo> workerMetricList = new ArrayList<MetricInfo>();
- private static UserDefMetric userDefMetric = new UserDefMetric();
-
- static {
- try {
- registerAll("jvm-thread-state", new ThreadStatesGaugeSet());
- registerAll("jvm-mem", new MemoryUsageGaugeSet());
- registerAll("jvm-gc", new GarbageCollectorMetricSet());
-
- jstack.register("jstack", new MetricJstack());
- } catch (Exception e) {
- LOG.warn("Failed to regist jvm metrics");
- }
- }
-
- public static MetricRegistry getMetrics() {
- return metrics;
- }
-
- public static MetricRegistry getJstack() {
- return jstack;
- }
-
- public static UserDefMetric getUserDefMetric() {
- return userDefMetric;
- }
-
- public static boolean unregister(String name) {
- LOG.info("Unregister metric " + name);
- return metrics.remove(name);
- }
-
- public static boolean unregister(String prefix, String name, String id, Metrics.MetricType type) {
- String MetricName;
- if (prefix == null)
- MetricName = name;
- else
- MetricName = prefix + "-" + name;
- boolean ret = unregister(MetricName);
-
- if (ret == true) {
- List<MetricInfo> metricList = null;
- if (type == MetricType.WORKER) {
- metricList = workerMetricList;
- } else {
- metricList = taskMetricMap.get(id);
- }
-
- boolean found = false;
- if (metricList != null) {
- for (MetricInfo metric : metricList) {
- if(metric.getName().equals(name)) {
- if (prefix != null) {
- if (metric.getPrefix().equals(prefix)) {
- metricList.remove(metric);
- found = true;
- break;
- }
- } else {
- if (metric.getPrefix() == null) {
- metricList.remove(metric);
- found = true;
- break;
- }
- }
- }
- }
- }
- if (found != true)
- LOG.warn("Name " + name + " is not found when unregister from metricList");
- }
- return ret;
- }
-
- public static boolean unregisterUserDefine(String name) {
- boolean ret = unregister(name);
-
- if (ret == true) {
- userDefMetric.remove(name);
- userDefMetric.unregisterCallback(name);
- }
-
- return ret;
- }
-
- public static <T extends Metric> T register(String name, T metric)
- throws IllegalArgumentException {
- LOG.info("Register Metric " + name);
- return metrics.register(name, metric);
- }
-
- public static <T extends Metric> T register(String prefix, String name, T metric,
- String idStr, MetricType metricType) throws IllegalArgumentException {
- String metricName;
- if (prefix == null)
- metricName = name;
- else
- metricName = prefix + "-" + name;
- T ret = register(metricName, metric);
- updateMetric(prefix, name, metricType, ret, idStr);
- return ret;
- }
-
- public static void registerUserDefine(String name, Object metric, MetricCallback callback) {
- if(metric instanceof Gauge<?>) {
- userDefMetric.addToGauge(name, (Gauge<?>)metric);
- } else if (metric instanceof Timer) {
- userDefMetric.addToTimer(name, (Timer)metric);
- } else if (metric instanceof Counter) {
- userDefMetric.addToCounter(name, (Counter)metric);
- } else if (metric instanceof Meter) {
- userDefMetric.addToMeter(name, (Meter)metric);
- } else if (metric instanceof Histogram) {
- userDefMetric.addToHistogram(name, (Histogram)metric);
- } else if (metric instanceof JStormTimer) {
- userDefMetric.addToTimer(name, ((JStormTimer)metric).getInstance());
- } else if (metric instanceof JStormHistogram) {
- userDefMetric.addToHistogram(name, ((JStormHistogram)metric).getInstance());
- } else {
- LOG.warn("registerUserDefine, unknow Metric type, name=" + name);
- }
-
- if (callback != null) {
- userDefMetric.registerCallback(callback, name);
- }
- }
-
-
- // copy from MetricRegistry
- public static void registerAll(String prefix, MetricSet metrics)
- throws IllegalArgumentException {
- for (Map.Entry<String, Metric> entry : metrics.getMetrics().entrySet()) {
- if (entry.getValue() instanceof MetricSet) {
- registerAll(MetricRegistry.name(prefix, entry.getKey()),
- (MetricSet) entry.getValue());
- } else {
- register(MetricRegistry.name(prefix, entry.getKey()),
- entry.getValue());
- }
- }
- }
-
- private static void updateMetric(String prefix, String name, MetricType metricType,
- Metric metric, String idStr) {
- Map<String, List<MetricInfo>> metricMap;
- List<MetricInfo> metricList;
- if (metricType == MetricType.TASK) {
- metricMap = taskMetricMap;
- metricList = metricMap.get(idStr);
- if (null == metricList) {
- metricList = new ArrayList<MetricInfo>();
- metricMap.put(idStr, metricList);
- }
- } else if (metricType == MetricType.WORKER) {
- metricList = workerMetricList;
- } else {
- LOG.error("updateMetricMap: unknown metric type");
- return;
- }
-
- MetricInfo metricInfo = new MetricInfo(prefix, name, metric);
- metricList.add(metricInfo);
-
- }
-
- public static Map<String, List<MetricInfo>> getTaskMetricMap() {
- return taskMetricMap;
- }
-
- public static List<MetricInfo> getWorkerMetricList() {
- return workerMetricList;
- }
-
- public static class QueueGauge implements Gauge<Float> {
- DisruptorQueue queue;
- String name;
-
- public QueueGauge(String name, DisruptorQueue queue) {
- this.queue = queue;
- this.name = name;
- }
-
- @Override
- public Float getValue() {
- Float ret = queue.pctFull();
- if (ret > 0.8) {
- //DEFAULT_LOG.info("Queue " + name + "is full " + ret);
- }
-
- return ret;
- }
-
- }
-
- public static Gauge<Float> registerQueue(String name, DisruptorQueue queue) {
- LOG.info("Register Metric " + name);
- return metrics.register(name, new QueueGauge(name, queue));
- }
-
- public static Gauge<Float> registerQueue(String prefix, String name, DisruptorQueue queue,
- String idStr, MetricType metricType) {
- String metricName;
- if (prefix == null)
- metricName = name;
- else
- metricName = prefix + "-" + name;
- Gauge<Float> ret = registerQueue(metricName, queue);
- updateMetric(prefix, name, metricType, ret, idStr);
- return ret;
- }
-
- public static Gauge<?> registerGauge(String name, Gauge<?> gauge) {
- LOG.info("Register Metric " + name);
- return metrics.register(name, gauge);
- }
-
- public static Counter registerCounter(String name) {
- LOG.info("Register Metric " + name);
- return metrics.counter(name);
- }
-
- public static Counter registerCounter(String prefix, String name,
- String idStr, MetricType metricType) {
- String metricName;
- if (prefix == null)
- metricName = name;
- else
- metricName = prefix + "-" + name;
- Counter ret = registerCounter(metricName);
- updateMetric(prefix, name, metricType, ret, idStr);
- return ret;
- }
-
- public static Meter registerMeter(String name) {
- LOG.info("Register Metric " + name);
- return metrics.meter(name);
- }
-
- public static Meter registerMeter(String prefix, String name,
- String idStr, MetricType metricType) {
- String metricName;
- if (prefix == null)
- metricName = name;
- else
- metricName = prefix + "-" + name;
- Meter ret = registerMeter(metricName);
- updateMetric(prefix, name, metricType, ret, idStr);
- return ret;
- }
-
- public static JStormHistogram registerHistograms(String name) {
- LOG.info("Register Metric " + name);
- Histogram instance = metrics.histogram(name);
-
- return new JStormHistogram(name, instance);
- }
-
- public static JStormHistogram registerHistograms(String prefix, String name,
- String idStr, MetricType metricType) {
- String metricName;
- if (prefix == null)
- metricName = name;
- else
- metricName = prefix + "-" + name;
- JStormHistogram ret = registerHistograms(metricName);
- updateMetric(prefix, name, metricType, ret.getInstance(), idStr);
- return ret;
- }
-
- public static JStormTimer registerTimer(String name) {
- LOG.info("Register Metric " + name);
-
- Timer instance = metrics.timer(name);
- return new JStormTimer(name, instance);
- }
-
- public static JStormTimer registerTimer(String prefix, String name,
- String idStr, MetricType metricType) {
- String metricName;
- if (prefix == null)
- metricName = name;
- else
- metricName = prefix + "-" + name;
- JStormTimer ret = registerTimer(metricName);
- updateMetric(prefix, name, metricType, ret.getInstance(), idStr);
- return ret;
- }
-
-}
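
The register helpers above compose the registry name as "prefix-name" and file a MetricInfo either under the per-task map (keyed by the id string) or the worker-level list. A minimal sketch; the prefix, metric names and task id are arbitrary placeholders:

    import com.codahale.metrics.Counter;

    import com.alibaba.jstorm.metric.JStormTimer;
    import com.alibaba.jstorm.metric.Metrics;

    public class MetricsExample {
        public static void main(String[] args) {
            // Registered as "spout-1-emit-count" and tracked under task id "3".
            Counter emitted = Metrics.registerCounter("spout-1", "emit-count",
                    "3", Metrics.MetricType.TASK);
            emitted.inc();

            // Worker-level timer; the id string is only used for TASK metrics.
            JStormTimer sendTimer = Metrics.registerTimer("worker", "netty-send",
                    null, Metrics.MetricType.WORKER);
            sendTimer.start();
            sendTimer.stop();

            System.out.println(Metrics.getTaskMetricMap().get("3").size());   // 1
        }
    }
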
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetric.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetric.java
deleted file mode 100644
index 51f787e..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetric.java
+++ /dev/null
@@ -1,106 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Map.Entry;
-import java.io.Serializable;
-
-import com.codahale.metrics.Metric;
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Sampling;
-import com.codahale.metrics.Snapshot;
-import com.codahale.metrics.Timer;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.Meter;
-import com.alibaba.jstorm.client.metric.MetricCallback;
-import com.alibaba.jstorm.metric.MetricInfo;
-
-
-/**
- * /storm-zk-root/Monitor/{topologyid}/UserDefMetrics/{workerid} data
- */
-public class UserDefMetric {
-
- private static final long serialVersionUID = 4547327064057659279L;
-
- private Map<String, Gauge<?>> gaugeMap = new HashMap<String, Gauge<?>>();
- private Map<String, Counter> counterMap = new HashMap<String, Counter>();
- private Map<String, Histogram> histogramMap = new HashMap<String, Histogram>();
- private Map<String, Timer> timerMap = new HashMap<String, Timer>();
- private Map<String, Meter> meterMap = new HashMap<String, Meter>();
- private Map<String, MetricCallback> callbacks = new HashMap<String, MetricCallback>();
-
- public UserDefMetric() {
- }
-
- public Map<String, Gauge<?>> getGauge() {
- return this.gaugeMap;
- }
- public void registerCallback(MetricCallback callback, String name) {
- if (callbacks.containsKey(name) != true) {
- callbacks.put(name, callback);
- }
- }
- public void unregisterCallback(String name) {
- callbacks.remove(name);
- }
- public Map<String, MetricCallback> getCallbacks() {
- return callbacks;
- }
- public void addToGauge(String name, Gauge<?> gauge) {
- gaugeMap.put(name, gauge);
- }
-
- public Map<String, Counter> getCounter() {
- return this.counterMap;
- }
-
- public void addToCounter(String name, Counter counter) {
- counterMap.put(name, counter);
- }
-
- public Map<String, Histogram> getHistogram() {
- return this.histogramMap;
- }
-
- public void addToHistogram(String name, Histogram histogram) {
- histogramMap.put(name, histogram);
- }
-
-
- public Map<String, Timer> getTimer() {
- return this.timerMap;
- }
-
- public void addToTimer(String name, Timer timer) {
- timerMap.put(name, timer);
- }
-
- public Map<String, Meter> getMeter() {
- return this.meterMap;
- }
-
- public void addToMeter(String name, Meter meter) {
- meterMap.put(name, meter);
- }
-
- public void remove(String name) {
- if (gaugeMap.containsKey(name)) {
- gaugeMap.remove(name);
- } else if (counterMap.containsKey(name)) {
- counterMap.remove(name);
- } else if (histogramMap.containsKey(name)) {
- histogramMap.remove(name);
- } else if (timerMap.containsKey(name)) {
- timerMap.remove(name);
- } else if (meterMap.containsKey(name)) {
- meterMap.remove(name);
- }
-
- if (callbacks.containsKey(name)) {
- callbacks.remove(name);
- }
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetricData.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetricData.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetricData.java
deleted file mode 100644
index 7ca0860..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/UserDefMetricData.java
+++ /dev/null
@@ -1,138 +0,0 @@
-package com.alibaba.jstorm.metric;
-
-import java.util.Map;
-import java.util.HashMap;
-import java.util.Map.Entry;
-import java.io.Serializable;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-import org.apache.log4j.Logger;
-
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Timer;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.Meter;
-import com.alibaba.jstorm.metric.metrdata.*;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-
-/**
- * /storm-zk-root/Monitor/{topologyid}/user/{workerid} data
- */
-public class UserDefMetricData implements Serializable {
- private static final Logger LOG = Logger.getLogger(UserDefMetricData.class);
-
- private static final long serialVersionUID = 954727168057659270L;
-
- private Map<String, GaugeData> gaugeDataMap = new HashMap<String, GaugeData>();
- private Map<String, CounterData> counterDataMap = new HashMap<String, CounterData>();
- private Map<String, TimerData> timerDataMap = new HashMap<String, TimerData>();
- private Map<String, MeterData> meterDataMap = new HashMap<String, MeterData>();
- private Map<String, HistogramData> histogramDataMap = new HashMap<String, HistogramData>();
-
- public UserDefMetricData() {
- }
-
- public Map<String, GaugeData> getGaugeDataMap() {
- return gaugeDataMap;
- }
-
- public Map<String, CounterData> getCounterDataMap() {
- return counterDataMap;
- }
-
- public Map<String, TimerData> getTimerDataMap() {
- return timerDataMap;
- }
-
- public Map<String, MeterData> getMeterDataMap() {
- return meterDataMap;
- }
-
- public Map<String, HistogramData> getHistogramDataMap() {
- return histogramDataMap;
- }
-
- public void updateFromGauge(Map<String, Gauge<?>> gaugeMap) {
- for(Entry<String, Gauge<?>> entry : gaugeMap.entrySet()) {
- try {
- GaugeData gaugeData = new GaugeData();
- gaugeData.setValue(JStormUtils.parseDouble(entry.getValue().getValue()));
- gaugeDataMap.put(entry.getKey(), gaugeData);
- } catch (Throwable e) {
- LOG.error("updateFromGauge exception ", e);
- }
- }
- }
-
- public void updateFromCounter(Map<String, Counter> counterMap) {
- for(Entry<String, Counter> entry : counterMap.entrySet()) {
- CounterData counterData = new CounterData();
- counterData.setValue(entry.getValue().getCount());
- counterDataMap.put(entry.getKey(), counterData);
- }
- }
-
- public void updateFromMeterData(Map<String, Meter> meterMap) {
- for(Entry<String, Meter> entry : meterMap.entrySet()) {
- Meter meter = entry.getValue();
- MeterData meterData = new MeterData();
- meterData.setCount(meter.getCount());
- meterData.setMeanRate(meter.getMeanRate());
- meterData.setOneMinuteRate(meter.getOneMinuteRate());
- meterData.setFiveMinuteRate(meter.getFiveMinuteRate());
- meterData.setFifteenMinuteRate(meter.getFifteenMinuteRate());
- meterDataMap.put(entry.getKey(), meterData);
- }
- }
-
- public void updateFromHistogramData(Map<String, Histogram> histogramMap) {
- for(Entry<String, Histogram> entry : histogramMap.entrySet()) {
- Histogram histogram = entry.getValue();
- HistogramData histogramData = new HistogramData();
- histogramData.setCount(histogram.getCount());
- histogramData.setMax(histogram.getSnapshot().getMax());
- histogramData.setMin(histogram.getSnapshot().getMin());
- histogramData.setMean(histogram.getSnapshot().getMean());
- histogramData.setMedian(histogram.getSnapshot().getMedian());
- histogramData.setStdDev(histogram.getSnapshot().getStdDev());
- histogramData.setPercent75th(histogram.getSnapshot().get75thPercentile());
- histogramData.setPercent95th(histogram.getSnapshot().get95thPercentile());
- histogramData.setPercent98th(histogram.getSnapshot().get98thPercentile());
- histogramData.setPercent99th(histogram.getSnapshot().get99thPercentile());
- histogramData.setPercent999th(histogram.getSnapshot().get999thPercentile());
- histogramDataMap.put(entry.getKey(), histogramData);
- }
- }
-
- public void updateFromTimerData(Map<String, Timer> timerMap) {
- for(Entry<String, Timer> entry : timerMap.entrySet()) {
- Timer timer = entry.getValue();
- TimerData timerData = new TimerData();
- timerData.setCount(timer.getCount());
- timerData.setMax(timer.getSnapshot().getMax());
- timerData.setMin(timer.getSnapshot().getMin());
- timerData.setMean(timer.getSnapshot().getMean());
- timerData.setMedian(timer.getSnapshot().getMedian());
- timerData.setStdDev(timer.getSnapshot().getStdDev());
- timerData.setPercent75th(timer.getSnapshot().get75thPercentile());
- timerData.setPercent95th(timer.getSnapshot().get95thPercentile());
- timerData.setPercent98th(timer.getSnapshot().get98thPercentile());
- timerData.setPercent99th(timer.getSnapshot().get99thPercentile());
- timerData.setPercent999th(timer.getSnapshot().get999thPercentile());
- timerData.setMeanRate(timer.getMeanRate());
- timerData.setOneMinuteRate(timer.getOneMinuteRate());
- timerData.setFiveMinuteRate(timer.getFiveMinuteRate());
- timerData.setFifteenMinuteRate(timer.getFifteenMinuteRate());
- timerDataMap.put(entry.getKey(), timerData);
- }
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/CounterData.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/CounterData.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/CounterData.java
deleted file mode 100644
index 727cb9d..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/CounterData.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.alibaba.jstorm.metric.metrdata;
-
-import java.io.Serializable;
-
-
-public class CounterData implements Serializable {
-
- private static final long serialVersionUID = 954627168057659219L;
-
- private long value;
-
- public CounterData () {
- value = 0l;
- }
-
- public long getValue() {
- return value;
- }
-
- public void setValue(long value) {
- this.value = value;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/GaugeData.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/GaugeData.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/GaugeData.java
deleted file mode 100644
index 9f64bf3..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/GaugeData.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.alibaba.jstorm.metric.metrdata;
-
-import java.io.Serializable;
-
-
-public class GaugeData implements Serializable {
-
- private static final long serialVersionUID = 954627168057659279L;
-
- private double value;
-
- public GaugeData () {
- value = 0.0;
- }
-
- public double getValue() {
- return value;
- }
-
- public void setValue(double value) {
- this.value = value;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/HistogramData.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/HistogramData.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/HistogramData.java
deleted file mode 100644
index ec39851..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/HistogramData.java
+++ /dev/null
@@ -1,112 +0,0 @@
-package com.alibaba.jstorm.metric.metrdata;
-
-import java.io.Serializable;
-
-
-public class HistogramData implements Serializable {
-
- private static final long serialVersionUID = 954627168057639289L;
-
- private long count;
- private long min;
- private long max;
- private double mean;
- private double stdDev;
- private double median;
- private double percent75th;
- private double percent95th;
- private double percent98th;
- private double percent99th;
- private double percent999th;
-
- public HistogramData() {
- }
-
- public long getCount() {
- return count;
- }
-
- public void setCount(long count) {
- this.count = count;
- }
-
- public long getMin() {
- return min;
- }
-
- public void setMin(long min) {
- this.min = min;
- }
-
- public long getMax() {
- return max;
- }
-
- public void setMax(long max) {
- this.max = max;
- }
-
- public double getMean() {
- return mean;
- }
-
- public void setMean(double mean) {
- this.mean = mean;
- }
-
- public double getStdDev() {
- return stdDev;
- }
-
- public void setStdDev(double stdDev) {
- this.stdDev = stdDev;
- }
-
- public double getMedian() {
- return median;
- }
-
- public void setMedian(double median) {
- this.median = median;
- }
-
- public double getPercent75th() {
- return percent75th;
- }
-
- public void setPercent75th(double percent75th) {
- this.percent75th = percent75th;
- }
-
- public double getPercent95th() {
- return percent95th;
- }
-
- public void setPercent95th(double percent95th) {
- this.percent95th = percent95th;
- }
-
- public double getPercent98th() {
- return percent98th;
- }
-
- public void setPercent98th(double percent98th) {
- this.percent98th = percent98th;
- }
-
- public double getPercent99th() {
- return percent99th;
- }
-
- public void setPercent99th(double percent99th) {
- this.percent99th = percent99th;
- }
-
- public double getPercent999th() {
- return percent999th;
- }
-
- public void setPercent999th(double percent999th) {
- this.percent999th = percent999th;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/MeterData.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/MeterData.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/MeterData.java
deleted file mode 100644
index 865a3c4..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/MeterData.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package com.alibaba.jstorm.metric.metrdata;
-
-import java.io.Serializable;
-
-
-public class MeterData implements Serializable {
-
- private static final long serialVersionUID = 954627168057659269L;
-
- private long count;
- private double meanRate;
- private double oneMinuteRate;
- private double fiveMinuteRate;
- private double fifteenMinuteRate;
-
- public MeterData() {
- }
-
- public void setCount(long count) {
- this.count = count;
- }
-
- public long getCount() {
- return this.count;
- }
-
- public void setMeanRate(double meanRate) {
- this.meanRate = meanRate;
- }
-
- public double getMeanRate() {
- return this.meanRate;
- }
-
- public void setOneMinuteRate(double oneMinuteRate) {
- this.oneMinuteRate = oneMinuteRate;
- }
-
- public double getOneMinuteRate() {
- return this.oneMinuteRate;
- }
-
- public void setFiveMinuteRate(double fiveMinuteRate) {
- this.fiveMinuteRate = fiveMinuteRate;
- }
-
- public double getFiveMinuteRate() {
- return this.fiveMinuteRate;
- }
-
- public void setFifteenMinuteRate(double fifteenMinuteRate) {
- this.fifteenMinuteRate = fifteenMinuteRate;
- }
-
- public double getFifteenMinuteRate() {
- return this.fifteenMinuteRate;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/TimerData.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/TimerData.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/TimerData.java
deleted file mode 100644
index 5aaab01..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/metric/metrdata/TimerData.java
+++ /dev/null
@@ -1,149 +0,0 @@
-package com.alibaba.jstorm.metric.metrdata;
-
-import java.io.Serializable;
-
-
-public class TimerData implements Serializable {
-
- private static final long serialVersionUID = 954627168057659239L;
-
- private long count;
- private double meanRate;
- private double oneMinuteRate;
- private double fiveMinuteRate;
- private double fifteenMinuteRate;
- private long min;
- private long max;
- private double mean;
- private double stdDev;
- private double median;
- private double percent75th;
- private double percent95th;
- private double percent98th;
- private double percent99th;
- private double percent999th;
-
- public TimerData() {
-
- }
-
- public long getCount() {
- return count;
- }
-
- public void setCount(long count) {
- this.count = count;
- }
-
- public long getMin() {
- return min;
- }
-
- public void setMin(long min) {
- this.min = min;
- }
-
- public long getMax() {
- return max;
- }
-
- public void setMax(long max) {
- this.max = max;
- }
-
- public double getMean() {
- return mean;
- }
-
- public void setMean(double mean) {
- this.mean = mean;
- }
-
- public double getStdDev() {
- return stdDev;
- }
-
- public void setStdDev(double stdDev) {
- this.stdDev = stdDev;
- }
-
- public double getMedian() {
- return median;
- }
-
- public void setMedian(double median) {
- this.median = median;
- }
-
- public double getPercent75th() {
- return percent75th;
- }
-
- public void setPercent75th(double percent75th) {
- this.percent75th = percent75th;
- }
-
- public double getPercent95th() {
- return percent95th;
- }
-
- public void setPercent95th(double percent95th) {
- this.percent95th = percent95th;
- }
-
- public double getPercent98th() {
- return percent98th;
- }
-
- public void setPercent98th(double percent98th) {
- this.percent98th = percent98th;
- }
-
- public double getPercent99th() {
- return percent99th;
- }
-
- public void setPercent99th(double percent99th) {
- this.percent99th = percent99th;
- }
-
- public double getPercent999th() {
- return percent999th;
- }
-
- public void setPercent999th(double percent999th) {
- this.percent999th = percent999th;
- }
-
- public void setMeanRate(double meanRate) {
- this.meanRate = meanRate;
- }
-
- public double getMeanRate() {
- return this.meanRate;
- }
-
- public void setOneMinuteRate(double oneMinuteRate) {
- this.oneMinuteRate = oneMinuteRate;
- }
-
- public double getOneMinuteRate() {
- return this.oneMinuteRate;
- }
-
- public void setFiveMinuteRate(double fiveMinuteRate) {
- this.fiveMinuteRate = fiveMinuteRate;
- }
-
- public double getFiveMinuteRate() {
- return this.fiveMinuteRate;
- }
-
- public void setFifteenMinuteRate(double fifteenMinuteRate) {
- this.fifteenMinuteRate = fifteenMinuteRate;
- }
-
- public double getFifteenMinuteRate() {
- return this.fifteenMinuteRate;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/DisruptorQueue.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/DisruptorQueue.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/DisruptorQueue.java
deleted file mode 100644
index b127095..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/DisruptorQueue.java
+++ /dev/null
@@ -1,151 +0,0 @@
-//package com.alibaba.jstorm.utils;
-//
-//import java.util.ArrayList;
-//import java.util.List;
-//import java.util.concurrent.Executor;
-//import java.util.concurrent.atomic.AtomicBoolean;
-//
-//import org.apache.commons.lang.mutable.MutableObject;
-//
-//import com.lmax.disruptor.EventFactory;
-//import com.lmax.disruptor.ExceptionHandler;
-//import com.lmax.disruptor.FatalExceptionHandler;
-//import com.lmax.disruptor.RingBuffer;
-//import com.lmax.disruptor.Sequence;
-//import com.lmax.disruptor.SequenceBarrier;
-//import com.lmax.disruptor.Sequencer;
-//import com.lmax.disruptor.WaitStrategy;
-//import com.lmax.disruptor.WorkHandler;
-//import com.lmax.disruptor.WorkProcessor;
-//import com.lmax.disruptor.util.Util;
-//
-//public class DisruptorQueue<T> {
-// private final RingBuffer<MutableObject> ringBuffer;
-// private final SequenceBarrier sequenceBarrier;
-// private final ExceptionHandler exceptionHandler;
-// private final List<WorkProcessor> workProcessors;
-// private final Sequence workSequence;
-// private final AtomicBoolean started = new AtomicBoolean(false);
-//
-// public DisruptorQueue(boolean isMultiProducer, int bufferSize,
-// WaitStrategy waitStrategy) {
-// if (isMultiProducer) {
-// ringBuffer = RingBuffer.createMultiProducer(
-// new ObjectEventFactory(), bufferSize, waitStrategy);
-// } else {
-// ringBuffer = RingBuffer.createSingleProducer(
-// new ObjectEventFactory(), bufferSize, waitStrategy);
-// }
-//
-// sequenceBarrier = ringBuffer.newBarrier();
-// exceptionHandler = new FatalExceptionHandler();
-// workProcessors = new ArrayList<WorkProcessor>();
-// workSequence = new Sequence(Sequencer.INITIAL_CURSOR_VALUE);
-// }
-//
-// public void register(WorkHandler<T> handler) {
-// WorkProcessor workProcessor = new WorkProcessor(ringBuffer,
-// sequenceBarrier, new HandleWraper(handler), exceptionHandler,
-// workSequence);
-//
-// ringBuffer.addGatingSequences(workProcessor.getSequence());
-//
-// workProcessors.add(workProcessor);
-// }
-//
-// void cleanup() {
-//
-// }
-//
-// /**
-// * Start the worker pool processing events in sequence.
-// *
-// * @param executor
-// * providing threads for running the workers.
-// * @return the {@link RingBuffer} used for the work queue.
-// * @throws IllegalStateException
-// * if the pool has already been started and not halted yet
-// */
-// public void start() {
-// if (!started.compareAndSet(false, true)) {
-// throw new IllegalStateException(
-// "WorkerPool has already been started and cannot be restarted until halted.");
-// }
-//
-// final long cursor = ringBuffer.getCursor();
-// workSequence.set(cursor);
-//
-// for (WorkProcessor<T> processor : workProcessors) {
-// processor.getSequence().set(cursor);
-// new Thread(processor).start();
-// }
-//
-// return;
-// }
-//
-// public Sequence[] getWorkerSequences() {
-// final Sequence[] sequences = new Sequence[workProcessors.size()];
-// for (int i = 0, size = workProcessors.size(); i < size; i++) {
-// sequences[i] = workProcessors.get(i).getSequence();
-// }
-//
-// return sequences;
-// }
-//
-// /**
-// * Wait for the {@link RingBuffer} to drain of published events then halt
-// * the workers.
-// */
-// public void drainAndHalt() {
-// Sequence[] workerSequences = getWorkerSequences();
-// while (ringBuffer.getCursor() > Util
-// .getMinimumSequence(workerSequences)) {
-// Thread.yield();
-// }
-//
-// for (WorkProcessor<?> processor : workProcessors) {
-// processor.halt();
-// }
-//
-// started.set(false);
-// }
-//
-// /**
-// * Halt all workers immediately at the end of their current cycle.
-// */
-// public void halt() {
-// for (WorkProcessor<?> processor : workProcessors) {
-// processor.halt();
-// }
-//
-// started.set(false);
-// }
-//
-// public void offer(T o) {
-// long sequence = ringBuffer.next();
-// ringBuffer.get(sequence).setValue(o);
-// ringBuffer.publish(sequence);
-// }
-//
-// public static class ObjectEventFactory implements
-// EventFactory<MutableObject> {
-//
-// public MutableObject newInstance() {
-// return new MutableObject();
-// }
-// }
-//
-// public static class HandleWraper<T> implements WorkHandler<MutableObject> {
-// private WorkHandler<T> handler;
-//
-// public HandleWraper(WorkHandler<T> handler) {
-// this.handler = handler;
-// }
-//
-// public void onEvent(MutableObject event) throws Exception {
-// // TODO Auto-generated method stub
-// handler.onEvent((T) event.getValue());
-// }
-//
-// }
-// }
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/EventSampler.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/EventSampler.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/EventSampler.java
deleted file mode 100644
index c0bff67..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/EventSampler.java
+++ /dev/null
@@ -1,100 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.util.Random;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.concurrent.atomic.AtomicLong;
-
-/**
- *
- * statistics tuples: sampling event
- *
- *
- * @author yannian/Longda
- *
- */
-public class EventSampler {
- private volatile int freq;
- private AtomicInteger i = new AtomicInteger(0);
- private volatile int target;
- private Random r = new Random();
-
- public EventSampler(int freq) {
- this.freq = freq;
- this.target = r.nextInt(freq);
-
- if (freq / 4 > 1) {
- intervalCheck.setInterval(freq / 4);
- }
- }
-
- /**
- * select 1/freq
- *
- * @return
- */
- public boolean countCheck() {
- i.incrementAndGet();
- if (i.get() > freq) {
- target = r.nextInt(freq);
- i.set(0);
- }
- if (i.get() == target) {
- return true;
- }
- return false;
- }
-
- private AtomicInteger counter = new AtomicInteger(0);
- private AtomicLong sum = new AtomicLong(0);
- private IntervalCheck intervalCheck = new IntervalCheck();
-
- public Integer tpsCheck() {
- int send = counter.incrementAndGet();
-
- Double pastSeconds = intervalCheck.checkAndGet();
- if (pastSeconds != null) {
- counter.set(0);
-
- return Integer.valueOf((int) (send / pastSeconds));
-
- }
-
- return null;
- }
-
- public Integer timesCheck() {
- int send = counter.incrementAndGet();
-
- Double pastSeconds = intervalCheck.checkAndGet();
- if (pastSeconds != null) {
- counter.set(0);
-
- return send;
-
- }
-
- return null;
- }
-
- public Pair<Integer, Double> avgCheck(long one) {
- int send = counter.incrementAndGet();
- long total = sum.addAndGet(one);
-
- Double pastSeconds = intervalCheck.checkAndGet();
- if (pastSeconds != null) {
- counter.set(0);
- sum.set(0);
-
- Double avg = Double.valueOf(0);
- if (send != 0) {
- avg = ((double)total)/send;
- }
-
-
- return new Pair<Integer, Double>(send, avg);
-
- }
-
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/ExpiredCallback.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/ExpiredCallback.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/ExpiredCallback.java
deleted file mode 100644
index 4ee450b..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/ExpiredCallback.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-public interface ExpiredCallback<K, V> {
- public void expire(K key, V val);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/FileAttribute.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/FileAttribute.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/FileAttribute.java
deleted file mode 100644
index 46582b2..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/FileAttribute.java
+++ /dev/null
@@ -1,118 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.Serializable;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-import org.json.simple.JSONAware;
-
-//import com.alibaba.fastjson.JSONAware;
-
-public class FileAttribute implements Serializable, JSONAware {
-
- /** */
- private static final long serialVersionUID = -5131640995402822835L;
-
- private String fileName;
- private String isDir;
- private String modifyTime;
- private String size;
-
- private static final String FILE_NAME_FIELD = "fileName";
- private static final String IS_DIR_FIELD = "isDir";
- private static final String MODIFY_TIME_FIELD = "modifyTime";
- private static final String SIZE_FIELD = "size";
-
- public String getFileName() {
- return fileName;
- }
-
- public void setFileName(String fileName) {
- this.fileName = fileName;
- }
-
- public String getIsDir() {
- return isDir;
- }
-
- public void setIsDir(String isDir) {
- this.isDir = isDir;
- }
-
- public String getModifyTime() {
- return modifyTime;
- }
-
- public void setModifyTime(String modifyTime) {
- this.modifyTime = modifyTime;
- }
-
- public String getSize() {
- return size;
- }
-
- public void setSize(String size) {
- this.size = size;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-
- @Override
- public String toJSONString() {
- Map<String, String> map = new HashMap<String, String>();
-
- map.put(FILE_NAME_FIELD, fileName);
- map.put(IS_DIR_FIELD, isDir);
- map.put(MODIFY_TIME_FIELD, modifyTime);
- map.put(SIZE_FIELD, size);
- return JStormUtils.to_json(map);
- }
-
- public static FileAttribute fromJSONObject(Map jobj) {
- if (jobj == null) {
- return null;
- }
-
- FileAttribute attribute = new FileAttribute();
-
- attribute.setFileName((String) jobj.get(FILE_NAME_FIELD));
- attribute.setIsDir((String) jobj.get(IS_DIR_FIELD));
- attribute.setModifyTime((String) jobj.get(MODIFY_TIME_FIELD));
- attribute.setSize((String) jobj.get(SIZE_FIELD));
-
- return attribute;
- }
-
- public static void main(String[] args) {
- Map<String, FileAttribute> map = new HashMap<String, FileAttribute>();
-
- FileAttribute attribute = new FileAttribute();
- attribute.setFileName("test");
- attribute.setIsDir("true");
- attribute.setModifyTime(new Date().toString());
- attribute.setSize("4096");
-
- map.put("test", attribute);
-
- System.out.println("Before:" + map);
-
- String jsonString = JStormUtils.to_json(map);
-
- Map<String, Map> map2 = (Map<String, Map>) JStormUtils
- .from_json(jsonString);
-
- Map jObject = map2.get("test");
-
- FileAttribute attribute2 = FileAttribute.fromJSONObject(jObject);
-
- System.out.println("attribute2:" + attribute2);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/HttpserverUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/HttpserverUtils.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/HttpserverUtils.java
deleted file mode 100644
index ee7376d..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/HttpserverUtils.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-public class HttpserverUtils {
-
- public static final String HTTPSERVER_CONTEXT_PATH_LOGVIEW = "/logview";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_CMD = "cmd";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_CMD_LIST = "listDir";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_CMD_SHOW = "showLog";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_CMD_JSTACK = "jstack";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_CMD_SHOW_CONF = "showConf";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_LOGFILE = "log";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_POS = "pos";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_DIR = "dir";
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_WORKER_PORT = "workerPort";
-
- public static final long HTTPSERVER_LOGVIEW_PAGESIZE = 16384;
-
- public static final String HTTPSERVER_LOGVIEW_PARAM_SIZE_FORMAT = "%016d\n";
-
-
-}
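
HttpserverUtils above only defines the context path and query-parameter names understood by JStorm's embedded log-view HTTP server. As a rough illustration of how the constants compose into a request, a hypothetical sketch follows; the host, port, log file name, and the exact query-string layout are assumptions for illustration, not taken from the commit:

import com.alibaba.jstorm.utils.HttpserverUtils;

// Hypothetical client-side URL built from the constants above; the host,
// port and file name are invented for the example.
public class LogviewUrlSketch {
    public static void main(String[] args) {
        String url = "http://supervisor-host:7621"
                + HttpserverUtils.HTTPSERVER_CONTEXT_PATH_LOGVIEW
                + "?" + HttpserverUtils.HTTPSERVER_LOGVIEW_PARAM_CMD
                + "=" + HttpserverUtils.HTTPSERVER_LOGVIEW_PARAM_CMD_SHOW
                + "&" + HttpserverUtils.HTTPSERVER_LOGVIEW_PARAM_LOGFILE + "=worker-6800.log"
                + "&" + HttpserverUtils.HTTPSERVER_LOGVIEW_PARAM_POS + "=0";
        System.out.println(url);
    }
}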
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/IntervalCheck.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/IntervalCheck.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/IntervalCheck.java
deleted file mode 100644
index 6d0acc2..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/utils/IntervalCheck.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package com.alibaba.jstorm.utils;
-
-import java.io.Serializable;
-
-public class IntervalCheck implements Serializable {
-
- /**
- *
- */
- private static final long serialVersionUID = 8952971673547362883L;
-
- long lastCheck = System.currentTimeMillis();
-
- // default interval is 1 second
- long interval = 1;
-
- /*
- * if last check time is before interval seconds, return true, otherwise
- * return false
- */
- public boolean check() {
- return checkAndGet() != null;
- }
-
- /**
- *
- * @return
- */
- public Double checkAndGet() {
- long now = System.currentTimeMillis();
-
- synchronized (this) {
- if (now >= interval * 1000 + lastCheck) {
- double pastSecond = ((double) (now - lastCheck)) / 1000;
- lastCheck = now;
- return pastSecond;
- }
- }
-
- return null;
- }
-
- public long getInterval() {
- return interval;
- }
-
- public void setInterval(long interval) {
- this.interval = interval;
- }
-
- public void adjust(long addTimeMillis) {
- lastCheck += addTimeMillis;
- }
-
- public void start() {
- lastCheck = System.currentTimeMillis();
- }
-}
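
The two sampling helpers deleted above are meant to be used together: EventSampler.countCheck() picks roughly one event out of every freq calls, while tpsCheck() relies on IntervalCheck to turn a running count into an events-per-second figure once per interval. A minimal usage sketch under that reading (the sampling frequency, the class name SamplerSketch and the println are illustrative only, not part of the commit):

import com.alibaba.jstorm.utils.EventSampler;

// Illustrative sketch only: sample roughly 1 in 20 tuples and report
// throughput once per IntervalCheck interval.
public class SamplerSketch {
    private final EventSampler sampler = new EventSampler(20);

    public void onTuple(Object tuple) {
        if (sampler.countCheck()) {
            // roughly one of every 20 tuples lands here for detailed inspection
        }
        Integer tps = sampler.tpsCheck(); // non-null only when the interval has elapsed
        if (tps != null) {
            System.out.println("approx throughput: " + tps + " tuples/s");
        }
    }
}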

http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/ErrorInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/ErrorInfo.java b/jstorm-core/src/main/java/backtype/storm/generated/ErrorInfo.java
new file mode 100644
index 0000000..f52e526
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/ErrorInfo.java
@@ -0,0 +1,488 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class ErrorInfo implements org.apache.thrift.TBase<ErrorInfo, ErrorInfo._Fields>, java.io.Serializable, Cloneable, Comparable<ErrorInfo> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ErrorInfo");
+
+ private static final org.apache.thrift.protocol.TField ERROR_FIELD_DESC = new org.apache.thrift.protocol.TField("error", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField ERROR_TIME_SECS_FIELD_DESC = new org.apache.thrift.protocol.TField("error_time_secs", org.apache.thrift.protocol.TType.I32, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new ErrorInfoStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new ErrorInfoTupleSchemeFactory());
+ }
+
+ private String error; // required
+ private int error_time_secs; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ ERROR((short)1, "error"),
+ ERROR_TIME_SECS((short)2, "error_time_secs");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // ERROR
+ return ERROR;
+ case 2: // ERROR_TIME_SECS
+ return ERROR_TIME_SECS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __ERROR_TIME_SECS_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.ERROR, new org.apache.thrift.meta_data.FieldMetaData("error", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.ERROR_TIME_SECS, new org.apache.thrift.meta_data.FieldMetaData("error_time_secs", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ErrorInfo.class, metaDataMap);
+ }
+
+ public ErrorInfo() {
+ }
+
+ public ErrorInfo(
+ String error,
+ int error_time_secs)
+ {
+ this();
+ this.error = error;
+ this.error_time_secs = error_time_secs;
+ set_error_time_secs_isSet(true);
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public ErrorInfo(ErrorInfo other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.is_set_error()) {
+ this.error = other.error;
+ }
+ this.error_time_secs = other.error_time_secs;
+ }
+
+ public ErrorInfo deepCopy() {
+ return new ErrorInfo(this);
+ }
+
+ @Override
+ public void clear() {
+ this.error = null;
+ set_error_time_secs_isSet(false);
+ this.error_time_secs = 0;
+ }
+
+ public String get_error() {
+ return this.error;
+ }
+
+ public void set_error(String error) {
+ this.error = error;
+ }
+
+ public void unset_error() {
+ this.error = null;
+ }
+
+ /** Returns true if field error is set (has been assigned a value) and false otherwise */
+ public boolean is_set_error() {
+ return this.error != null;
+ }
+
+ public void set_error_isSet(boolean value) {
+ if (!value) {
+ this.error = null;
+ }
+ }
+
+ public int get_error_time_secs() {
+ return this.error_time_secs;
+ }
+
+ public void set_error_time_secs(int error_time_secs) {
+ this.error_time_secs = error_time_secs;
+ set_error_time_secs_isSet(true);
+ }
+
+ public void unset_error_time_secs() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __ERROR_TIME_SECS_ISSET_ID);
+ }
+
+ /** Returns true if field error_time_secs is set (has been assigned a value) and false otherwise */
+ public boolean is_set_error_time_secs() {
+ return EncodingUtils.testBit(__isset_bitfield, __ERROR_TIME_SECS_ISSET_ID);
+ }
+
+ public void set_error_time_secs_isSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __ERROR_TIME_SECS_ISSET_ID, value);
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case ERROR:
+ if (value == null) {
+ unset_error();
+ } else {
+ set_error((String)value);
+ }
+ break;
+
+ case ERROR_TIME_SECS:
+ if (value == null) {
+ unset_error_time_secs();
+ } else {
+ set_error_time_secs((Integer)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case ERROR:
+ return get_error();
+
+ case ERROR_TIME_SECS:
+ return Integer.valueOf(get_error_time_secs());
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case ERROR:
+ return is_set_error();
+ case ERROR_TIME_SECS:
+ return is_set_error_time_secs();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof ErrorInfo)
+ return this.equals((ErrorInfo)that);
+ return false;
+ }
+
+ public boolean equals(ErrorInfo that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_error = true && this.is_set_error();
+ boolean that_present_error = true && that.is_set_error();
+ if (this_present_error || that_present_error) {
+ if (!(this_present_error && that_present_error))
+ return false;
+ if (!this.error.equals(that.error))
+ return false;
+ }
+
+ boolean this_present_error_time_secs = true;
+ boolean that_present_error_time_secs = true;
+ if (this_present_error_time_secs || that_present_error_time_secs) {
+ if (!(this_present_error_time_secs && that_present_error_time_secs))
+ return false;
+ if (this.error_time_secs != that.error_time_secs)
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_error = true && (is_set_error());
+ list.add(present_error);
+ if (present_error)
+ list.add(error);
+
+ boolean present_error_time_secs = true;
+ list.add(present_error_time_secs);
+ if (present_error_time_secs)
+ list.add(error_time_secs);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(ErrorInfo other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_error()).compareTo(other.is_set_error());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_error()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.error, other.error);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_error_time_secs()).compareTo(other.is_set_error_time_secs());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_error_time_secs()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.error_time_secs, other.error_time_secs);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ErrorInfo(");
+ boolean first = true;
+
+ sb.append("error:");
+ if (this.error == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.error);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("error_time_secs:");
+ sb.append(this.error_time_secs);
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_error()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'error' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_error_time_secs()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'error_time_secs' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class ErrorInfoStandardSchemeFactory implements SchemeFactory {
+ public ErrorInfoStandardScheme getScheme() {
+ return new ErrorInfoStandardScheme();
+ }
+ }
+
+ private static class ErrorInfoStandardScheme extends StandardScheme<ErrorInfo> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, ErrorInfo struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // ERROR
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.error = iprot.readString();
+ struct.set_error_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // ERROR_TIME_SECS
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.error_time_secs = iprot.readI32();
+ struct.set_error_time_secs_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, ErrorInfo struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.error != null) {
+ oprot.writeFieldBegin(ERROR_FIELD_DESC);
+ oprot.writeString(struct.error);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldBegin(ERROR_TIME_SECS_FIELD_DESC);
+ oprot.writeI32(struct.error_time_secs);
+ oprot.writeFieldEnd();
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class ErrorInfoTupleSchemeFactory implements SchemeFactory {
+ public ErrorInfoTupleScheme getScheme() {
+ return new ErrorInfoTupleScheme();
+ }
+ }
+
+ private static class ErrorInfoTupleScheme extends TupleScheme<ErrorInfo> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, ErrorInfo struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.error);
+ oprot.writeI32(struct.error_time_secs);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, ErrorInfo struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.error = iprot.readString();
+ struct.set_error_isSet(true);
+ struct.error_time_secs = iprot.readI32();
+ struct.set_error_time_secs_isSet(true);
+ }
+ }
+
+}
+
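
Both fields of the generated ErrorInfo struct are declared required, and validate() throws a TProtocolException if either is unset. A minimal round-trip sketch, assuming the standard libthrift 0.9.2 TSerializer/TDeserializer helpers with the binary protocol (the error message and timestamp are illustrative):

import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TBinaryProtocol;

import backtype.storm.generated.ErrorInfo;

// Sketch only: constructs, validates, serializes and re-reads the generated struct.
public class ErrorInfoRoundTrip {
    public static void main(String[] args) throws TException {
        ErrorInfo info = new ErrorInfo("worker died unexpectedly", 1436928000);
        info.validate(); // passes because both required fields are set

        byte[] bytes = new TSerializer(new TBinaryProtocol.Factory()).serialize(info);

        ErrorInfo copy = new ErrorInfo();
        new TDeserializer(new TBinaryProtocol.Factory()).deserialize(copy, bytes);
        System.out.println(copy.equals(info)); // true
    }
}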
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/GlobalStreamId.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/GlobalStreamId.java b/jstorm-core/src/main/java/backtype/storm/generated/GlobalStreamId.java
new file mode 100644
index 0000000..490a81d
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/GlobalStreamId.java
@@ -0,0 +1,490 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class GlobalStreamId implements org.apache.thrift.TBase<GlobalStreamId, GlobalStreamId._Fields>, java.io.Serializable, Cloneable, Comparable<GlobalStreamId> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GlobalStreamId");
+
+ private static final org.apache.thrift.protocol.TField COMPONENT_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("componentId", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField STREAM_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("streamId", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new GlobalStreamIdStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new GlobalStreamIdTupleSchemeFactory());
+ }
+
+ private String componentId; // required
+ private String streamId; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ COMPONENT_ID((short)1, "componentId"),
+ STREAM_ID((short)2, "streamId");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // COMPONENT_ID
+ return COMPONENT_ID;
+ case 2: // STREAM_ID
+ return STREAM_ID;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.COMPONENT_ID, new org.apache.thrift.meta_data.FieldMetaData("componentId", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.STREAM_ID, new org.apache.thrift.meta_data.FieldMetaData("streamId", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(GlobalStreamId.class, metaDataMap);
+ }
+
+ public GlobalStreamId() {
+ }
+
+ public GlobalStreamId(
+ String componentId,
+ String streamId)
+ {
+ this();
+ this.componentId = componentId;
+ this.streamId = streamId;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public GlobalStreamId(GlobalStreamId other) {
+ if (other.is_set_componentId()) {
+ this.componentId = other.componentId;
+ }
+ if (other.is_set_streamId()) {
+ this.streamId = other.streamId;
+ }
+ }
+
+ public GlobalStreamId deepCopy() {
+ return new GlobalStreamId(this);
+ }
+
+ @Override
+ public void clear() {
+ this.componentId = null;
+ this.streamId = null;
+ }
+
+ public String get_componentId() {
+ return this.componentId;
+ }
+
+ public void set_componentId(String componentId) {
+ this.componentId = componentId;
+ }
+
+ public void unset_componentId() {
+ this.componentId = null;
+ }
+
+ /** Returns true if field componentId is set (has been assigned a value) and false otherwise */
+ public boolean is_set_componentId() {
+ return this.componentId != null;
+ }
+
+ public void set_componentId_isSet(boolean value) {
+ if (!value) {
+ this.componentId = null;
+ }
+ }
+
+ public String get_streamId() {
+ return this.streamId;
+ }
+
+ public void set_streamId(String streamId) {
+ this.streamId = streamId;
+ }
+
+ public void unset_streamId() {
+ this.streamId = null;
+ }
+
+ /** Returns true if field streamId is set (has been assigned a value) and false otherwise */
+ public boolean is_set_streamId() {
+ return this.streamId != null;
+ }
+
+ public void set_streamId_isSet(boolean value) {
+ if (!value) {
+ this.streamId = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case COMPONENT_ID:
+ if (value == null) {
+ unset_componentId();
+ } else {
+ set_componentId((String)value);
+ }
+ break;
+
+ case STREAM_ID:
+ if (value == null) {
+ unset_streamId();
+ } else {
+ set_streamId((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case COMPONENT_ID:
+ return get_componentId();
+
+ case STREAM_ID:
+ return get_streamId();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case COMPONENT_ID:
+ return is_set_componentId();
+ case STREAM_ID:
+ return is_set_streamId();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof GlobalStreamId)
+ return this.equals((GlobalStreamId)that);
+ return false;
+ }
+
+ public boolean equals(GlobalStreamId that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_componentId = true && this.is_set_componentId();
+ boolean that_present_componentId = true && that.is_set_componentId();
+ if (this_present_componentId || that_present_componentId) {
+ if (!(this_present_componentId && that_present_componentId))
+ return false;
+ if (!this.componentId.equals(that.componentId))
+ return false;
+ }
+
+ boolean this_present_streamId = true && this.is_set_streamId();
+ boolean that_present_streamId = true && that.is_set_streamId();
+ if (this_present_streamId || that_present_streamId) {
+ if (!(this_present_streamId && that_present_streamId))
+ return false;
+ if (!this.streamId.equals(that.streamId))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_componentId = true && (is_set_componentId());
+ list.add(present_componentId);
+ if (present_componentId)
+ list.add(componentId);
+
+ boolean present_streamId = true && (is_set_streamId());
+ list.add(present_streamId);
+ if (present_streamId)
+ list.add(streamId);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(GlobalStreamId other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_componentId()).compareTo(other.is_set_componentId());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_componentId()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.componentId, other.componentId);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_streamId()).compareTo(other.is_set_streamId());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_streamId()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.streamId, other.streamId);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("GlobalStreamId(");
+ boolean first = true;
+
+ sb.append("componentId:");
+ if (this.componentId == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.componentId);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("streamId:");
+ if (this.streamId == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.streamId);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_componentId()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'componentId' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_streamId()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'streamId' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class GlobalStreamIdStandardSchemeFactory implements SchemeFactory {
+ public GlobalStreamIdStandardScheme getScheme() {
+ return new GlobalStreamIdStandardScheme();
+ }
+ }
+
+ private static class GlobalStreamIdStandardScheme extends StandardScheme<GlobalStreamId> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, GlobalStreamId struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // COMPONENT_ID
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.componentId = iprot.readString();
+ struct.set_componentId_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // STREAM_ID
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.streamId = iprot.readString();
+ struct.set_streamId_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, GlobalStreamId struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.componentId != null) {
+ oprot.writeFieldBegin(COMPONENT_ID_FIELD_DESC);
+ oprot.writeString(struct.componentId);
+ oprot.writeFieldEnd();
+ }
+ if (struct.streamId != null) {
+ oprot.writeFieldBegin(STREAM_ID_FIELD_DESC);
+ oprot.writeString(struct.streamId);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class GlobalStreamIdTupleSchemeFactory implements SchemeFactory {
+ public GlobalStreamIdTupleScheme getScheme() {
+ return new GlobalStreamIdTupleScheme();
+ }
+ }
+
+ private static class GlobalStreamIdTupleScheme extends TupleScheme<GlobalStreamId> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, GlobalStreamId struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.componentId);
+ oprot.writeString(struct.streamId);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, GlobalStreamId struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.componentId = iprot.readString();
+ struct.set_componentId_isSet(true);
+ struct.streamId = iprot.readString();
+ struct.set_streamId_isSet(true);
+ }
+ }
+
+}
+
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/Grouping.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/Grouping.java b/jstorm-core/src/main/java/backtype/storm/generated/Grouping.java
new file mode 100755
index 0000000..bc60a06
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/Grouping.java
@@ -0,0 +1,844 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+public class Grouping extends org.apache.thrift.TUnion<Grouping, Grouping._Fields> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Grouping");
+ private static final org.apache.thrift.protocol.TField FIELDS_FIELD_DESC = new org.apache.thrift.protocol.TField("fields", org.apache.thrift.protocol.TType.LIST, (short)1);
+ private static final org.apache.thrift.protocol.TField SHUFFLE_FIELD_DESC = new org.apache.thrift.protocol.TField("shuffle", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+ private static final org.apache.thrift.protocol.TField ALL_FIELD_DESC = new org.apache.thrift.protocol.TField("all", org.apache.thrift.protocol.TType.STRUCT, (short)3);
+ private static final org.apache.thrift.protocol.TField NONE_FIELD_DESC = new org.apache.thrift.protocol.TField("none", org.apache.thrift.protocol.TType.STRUCT, (short)4);
+ private static final org.apache.thrift.protocol.TField DIRECT_FIELD_DESC = new org.apache.thrift.protocol.TField("direct", org.apache.thrift.protocol.TType.STRUCT, (short)5);
+ private static final org.apache.thrift.protocol.TField CUSTOM_OBJECT_FIELD_DESC = new org.apache.thrift.protocol.TField("custom_object", org.apache.thrift.protocol.TType.STRUCT, (short)6);
+ private static final org.apache.thrift.protocol.TField CUSTOM_SERIALIZED_FIELD_DESC = new org.apache.thrift.protocol.TField("custom_serialized", org.apache.thrift.protocol.TType.STRING, (short)7);
+ private static final org.apache.thrift.protocol.TField LOCAL_OR_SHUFFLE_FIELD_DESC = new org.apache.thrift.protocol.TField("local_or_shuffle", org.apache.thrift.protocol.TType.STRUCT, (short)8);
+ private static final org.apache.thrift.protocol.TField LOCAL_FIRST_FIELD_DESC = new org.apache.thrift.protocol.TField("localFirst", org.apache.thrift.protocol.TType.STRUCT, (short)9);
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ FIELDS((short)1, "fields"),
+ SHUFFLE((short)2, "shuffle"),
+ ALL((short)3, "all"),
+ NONE((short)4, "none"),
+ DIRECT((short)5, "direct"),
+ CUSTOM_OBJECT((short)6, "custom_object"),
+ CUSTOM_SERIALIZED((short)7, "custom_serialized"),
+ LOCAL_OR_SHUFFLE((short)8, "local_or_shuffle"),
+ LOCAL_FIRST((short)9, "localFirst");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // FIELDS
+ return FIELDS;
+ case 2: // SHUFFLE
+ return SHUFFLE;
+ case 3: // ALL
+ return ALL;
+ case 4: // NONE
+ return NONE;
+ case 5: // DIRECT
+ return DIRECT;
+ case 6: // CUSTOM_OBJECT
+ return CUSTOM_OBJECT;
+ case 7: // CUSTOM_SERIALIZED
+ return CUSTOM_SERIALIZED;
+ case 8: // LOCAL_OR_SHUFFLE
+ return LOCAL_OR_SHUFFLE;
+ case 9: // LOCAL_FIRST
+ return LOCAL_FIRST;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.FIELDS, new org.apache.thrift.meta_data.FieldMetaData("fields", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+ tmpMap.put(_Fields.SHUFFLE, new org.apache.thrift.meta_data.FieldMetaData("shuffle", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NullStruct.class)));
+ tmpMap.put(_Fields.ALL, new org.apache.thrift.meta_data.FieldMetaData("all", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NullStruct.class)));
+ tmpMap.put(_Fields.NONE, new org.apache.thrift.meta_data.FieldMetaData("none", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NullStruct.class)));
+ tmpMap.put(_Fields.DIRECT, new org.apache.thrift.meta_data.FieldMetaData("direct", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NullStruct.class)));
+ tmpMap.put(_Fields.CUSTOM_OBJECT, new org.apache.thrift.meta_data.FieldMetaData("custom_object", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, JavaObject.class)));
+ tmpMap.put(_Fields.CUSTOM_SERIALIZED, new org.apache.thrift.meta_data.FieldMetaData("custom_serialized", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
+ tmpMap.put(_Fields.LOCAL_OR_SHUFFLE, new org.apache.thrift.meta_data.FieldMetaData("local_or_shuffle", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NullStruct.class)));
+ tmpMap.put(_Fields.LOCAL_FIRST, new org.apache.thrift.meta_data.FieldMetaData("localFirst", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NullStruct.class)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Grouping.class, metaDataMap);
+ }
+
+ public Grouping() {
+ super();
+ }
+
+ public Grouping(_Fields setField, Object value) {
+ super(setField, value);
+ }
+
+ public Grouping(Grouping other) {
+ super(other);
+ }
+ public Grouping deepCopy() {
+ return new Grouping(this);
+ }
+
+ public static Grouping fields(List<String> value) {
+ Grouping x = new Grouping();
+ x.set_fields(value);
+ return x;
+ }
+
+ public static Grouping shuffle(NullStruct value) {
+ Grouping x = new Grouping();
+ x.set_shuffle(value);
+ return x;
+ }
+
+ public static Grouping all(NullStruct value) {
+ Grouping x = new Grouping();
+ x.set_all(value);
+ return x;
+ }
+
+ public static Grouping none(NullStruct value) {
+ Grouping x = new Grouping();
+ x.set_none(value);
+ return x;
+ }
+
+ public static Grouping direct(NullStruct value) {
+ Grouping x = new Grouping();
+ x.set_direct(value);
+ return x;
+ }
+
+ public static Grouping custom_object(JavaObject value) {
+ Grouping x = new Grouping();
+ x.set_custom_object(value);
+ return x;
+ }
+
+ public static Grouping custom_serialized(ByteBuffer value) {
+ Grouping x = new Grouping();
+ x.set_custom_serialized(value);
+ return x;
+ }
+
+ public static Grouping custom_serialized(byte[] value) {
+ Grouping x = new Grouping();
+ x.set_custom_serialized(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+ return x;
+ }
+
+ public static Grouping local_or_shuffle(NullStruct value) {
+ Grouping x = new Grouping();
+ x.set_local_or_shuffle(value);
+ return x;
+ }
+
+ public static Grouping localFirst(NullStruct value) {
+ Grouping x = new Grouping();
+ x.set_localFirst(value);
+ return x;
+ }
+
+
+ @Override
+ protected void checkType(_Fields setField, Object value) throws ClassCastException {
+ switch (setField) {
+ case FIELDS:
+ if (value instanceof List) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type List<String> for field 'fields', but got " + value.getClass().getSimpleName());
+ case SHUFFLE:
+ if (value instanceof NullStruct) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type NullStruct for field 'shuffle', but got " + value.getClass().getSimpleName());
+ case ALL:
+ if (value instanceof NullStruct) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type NullStruct for field 'all', but got " + value.getClass().getSimpleName());
+ case NONE:
+ if (value instanceof NullStruct) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type NullStruct for field 'none', but got " + value.getClass().getSimpleName());
+ case DIRECT:
+ if (value instanceof NullStruct) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type NullStruct for field 'direct', but got " + value.getClass().getSimpleName());
+ case CUSTOM_OBJECT:
+ if (value instanceof JavaObject) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type JavaObject for field 'custom_object', but got " + value.getClass().getSimpleName());
+ case CUSTOM_SERIALIZED:
+ if (value instanceof ByteBuffer) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type ByteBuffer for field 'custom_serialized', but got " + value.getClass().getSimpleName());
+ case LOCAL_OR_SHUFFLE:
+ if (value instanceof NullStruct) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type NullStruct for field 'local_or_shuffle', but got " + value.getClass().getSimpleName());
+ case LOCAL_FIRST:
+ if (value instanceof NullStruct) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type NullStruct for field 'localFirst', but got " + value.getClass().getSimpleName());
+ default:
+ throw new IllegalArgumentException("Unknown field id " + setField);
+ }
+ }
+
+ @Override
+ protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
+ _Fields setField = _Fields.findByThriftId(field.id);
+ if (setField != null) {
+ switch (setField) {
+ case FIELDS:
+ if (field.type == FIELDS_FIELD_DESC.type) {
+ List<String> fields;
+ {
+ org.apache.thrift.protocol.TList _list8 = iprot.readListBegin();
+ fields = new ArrayList<String>(_list8.size);
+ String _elem9;
+ for (int _i10 = 0; _i10 < _list8.size; ++_i10)
+ {
+ _elem9 = iprot.readString();
+ fields.add(_elem9);
+ }
+ iprot.readListEnd();
+ }
+ return fields;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case SHUFFLE:
+ if (field.type == SHUFFLE_FIELD_DESC.type) {
+ NullStruct shuffle;
+ shuffle = new NullStruct();
+ shuffle.read(iprot);
+ return shuffle;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case ALL:
+ if (field.type == ALL_FIELD_DESC.type) {
+ NullStruct all;
+ all = new NullStruct();
+ all.read(iprot);
+ return all;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case NONE:
+ if (field.type == NONE_FIELD_DESC.type) {
+ NullStruct none;
+ none = new NullStruct();
+ none.read(iprot);
+ return none;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case DIRECT:
+ if (field.type == DIRECT_FIELD_DESC.type) {
+ NullStruct direct;
+ direct = new NullStruct();
+ direct.read(iprot);
+ return direct;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case CUSTOM_OBJECT:
+ if (field.type == CUSTOM_OBJECT_FIELD_DESC.type) {
+ JavaObject custom_object;
+ custom_object = new JavaObject();
+ custom_object.read(iprot);
+ return custom_object;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case CUSTOM_SERIALIZED:
+ if (field.type == CUSTOM_SERIALIZED_FIELD_DESC.type) {
+ ByteBuffer custom_serialized;
+ custom_serialized = iprot.readBinary();
+ return custom_serialized;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case LOCAL_OR_SHUFFLE:
+ if (field.type == LOCAL_OR_SHUFFLE_FIELD_DESC.type) {
+ NullStruct local_or_shuffle;
+ local_or_shuffle = new NullStruct();
+ local_or_shuffle.read(iprot);
+ return local_or_shuffle;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case LOCAL_FIRST:
+ if (field.type == LOCAL_FIRST_FIELD_DESC.type) {
+ NullStruct localFirst;
+ localFirst = new NullStruct();
+ localFirst.read(iprot);
+ return localFirst;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ default:
+ throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+ }
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ }
+
+ @Override
+ protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ switch (setField_) {
+ case FIELDS:
+ List<String> fields = (List<String>)value_;
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, fields.size()));
+ for (String _iter11 : fields)
+ {
+ oprot.writeString(_iter11);
+ }
+ oprot.writeListEnd();
+ }
+ return;
+ case SHUFFLE:
+ NullStruct shuffle = (NullStruct)value_;
+ shuffle.write(oprot);
+ return;
+ case ALL:
+ NullStruct all = (NullStruct)value_;
+ all.write(oprot);
+ return;
+ case NONE:
+ NullStruct none = (NullStruct)value_;
+ none.write(oprot);
+ return;
+ case DIRECT:
+ NullStruct direct = (NullStruct)value_;
+ direct.write(oprot);
+ return;
+ case CUSTOM_OBJECT:
+ JavaObject custom_object = (JavaObject)value_;
+ custom_object.write(oprot);
+ return;
+ case CUSTOM_SERIALIZED:
+ ByteBuffer custom_serialized = (ByteBuffer)value_;
+ oprot.writeBinary(custom_serialized);
+ return;
+ case LOCAL_OR_SHUFFLE:
+ NullStruct local_or_shuffle = (NullStruct)value_;
+ local_or_shuffle.write(oprot);
+ return;
+ case LOCAL_FIRST:
+ NullStruct localFirst = (NullStruct)value_;
+ localFirst.write(oprot);
+ return;
+ default:
+ throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+ }
+ }
+
+ @Override
+ protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID) throws org.apache.thrift.TException {
+ _Fields setField = _Fields.findByThriftId(fieldID);
+ if (setField != null) {
+ switch (setField) {
+ case FIELDS:
+ List<String> fields;
+ {
+ org.apache.thrift.protocol.TList _list12 = iprot.readListBegin();
+ fields = new ArrayList<String>(_list12.size);
+ String _elem13;
+ for (int _i14 = 0; _i14 < _list12.size; ++_i14)
+ {
+ _elem13 = iprot.readString();
+ fields.add(_elem13);
+ }
+ iprot.readListEnd();
+ }
+ return fields;
+ case SHUFFLE:
+ NullStruct shuffle;
+ shuffle = new NullStruct();
+ shuffle.read(iprot);
+ return shuffle;
+ case ALL:
+ NullStruct all;
+ all = new NullStruct();
+ all.read(iprot);
+ return all;
+ case NONE:
+ NullStruct none;
+ none = new NullStruct();
+ none.read(iprot);
+ return none;
+ case DIRECT:
+ NullStruct direct;
+ direct = new NullStruct();
+ direct.read(iprot);
+ return direct;
+ case CUSTOM_OBJECT:
+ JavaObject custom_object;
+ custom_object = new JavaObject();
+ custom_object.read(iprot);
+ return custom_object;
+ case CUSTOM_SERIALIZED:
+ ByteBuffer custom_serialized;
+ custom_serialized = iprot.readBinary();
+ return custom_serialized;
+ case LOCAL_OR_SHUFFLE:
+ NullStruct local_or_shuffle;
+ local_or_shuffle = new NullStruct();
+ local_or_shuffle.read(iprot);
+ return local_or_shuffle;
+ case LOCAL_FIRST:
+ NullStruct localFirst;
+ localFirst = new NullStruct();
+ localFirst.read(iprot);
+ return localFirst;
+ default:
+ throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+ }
+ } else {
+ throw new TProtocolException("Couldn't find a field with field id " + fieldID);
+ }
+ }
+
+ @Override
+ protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ switch (setField_) {
+ case FIELDS:
+ List<String> fields = (List<String>)value_;
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, fields.size()));
+ for (String _iter15 : fields)
+ {
+ oprot.writeString(_iter15);
+ }
+ oprot.writeListEnd();
+ }
+ return;
+ case SHUFFLE:
+ NullStruct shuffle = (NullStruct)value_;
+ shuffle.write(oprot);
+ return;
+ case ALL:
+ NullStruct all = (NullStruct)value_;
+ all.write(oprot);
+ return;
+ case NONE:
+ NullStruct none = (NullStruct)value_;
+ none.write(oprot);
+ return;
+ case DIRECT:
+ NullStruct direct = (NullStruct)value_;
+ direct.write(oprot);
+ return;
+ case CUSTOM_OBJECT:
+ JavaObject custom_object = (JavaObject)value_;
+ custom_object.write(oprot);
+ return;
+ case CUSTOM_SERIALIZED:
+ ByteBuffer custom_serialized = (ByteBuffer)value_;
+ oprot.writeBinary(custom_serialized);
+ return;
+ case LOCAL_OR_SHUFFLE:
+ NullStruct local_or_shuffle = (NullStruct)value_;
+ local_or_shuffle.write(oprot);
+ return;
+ case LOCAL_FIRST:
+ NullStruct localFirst = (NullStruct)value_;
+ localFirst.write(oprot);
+ return;
+ default:
+ throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+ }
+ }
+
+ @Override
+ protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
+ switch (setField) {
+ case FIELDS:
+ return FIELDS_FIELD_DESC;
+ case SHUFFLE:
+ return SHUFFLE_FIELD_DESC;
+ case ALL:
+ return ALL_FIELD_DESC;
+ case NONE:
+ return NONE_FIELD_DESC;
+ case DIRECT:
+ return DIRECT_FIELD_DESC;
+ case CUSTOM_OBJECT:
+ return CUSTOM_OBJECT_FIELD_DESC;
+ case CUSTOM_SERIALIZED:
+ return CUSTOM_SERIALIZED_FIELD_DESC;
+ case LOCAL_OR_SHUFFLE:
+ return LOCAL_OR_SHUFFLE_FIELD_DESC;
+ case LOCAL_FIRST:
+ return LOCAL_FIRST_FIELD_DESC;
+ default:
+ throw new IllegalArgumentException("Unknown field id " + setField);
+ }
+ }
+
+ @Override
+ protected org.apache.thrift.protocol.TStruct getStructDesc() {
+ return STRUCT_DESC;
+ }
+
+ @Override
+ protected _Fields enumForId(short id) {
+ return _Fields.findByThriftIdOrThrow(id);
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+
+ public List<String> get_fields() {
+ if (getSetField() == _Fields.FIELDS) {
+ return (List<String>)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'fields' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_fields(List<String> value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.FIELDS;
+ value_ = value;
+ }
+
+ public NullStruct get_shuffle() {
+ if (getSetField() == _Fields.SHUFFLE) {
+ return (NullStruct)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'shuffle' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_shuffle(NullStruct value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.SHUFFLE;
+ value_ = value;
+ }
+
+ public NullStruct get_all() {
+ if (getSetField() == _Fields.ALL) {
+ return (NullStruct)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'all' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_all(NullStruct value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.ALL;
+ value_ = value;
+ }
+
+ public NullStruct get_none() {
+ if (getSetField() == _Fields.NONE) {
+ return (NullStruct)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'none' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_none(NullStruct value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.NONE;
+ value_ = value;
+ }
+
+ public NullStruct get_direct() {
+ if (getSetField() == _Fields.DIRECT) {
+ return (NullStruct)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'direct' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_direct(NullStruct value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.DIRECT;
+ value_ = value;
+ }
+
+ public JavaObject get_custom_object() {
+ if (getSetField() == _Fields.CUSTOM_OBJECT) {
+ return (JavaObject)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'custom_object' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_custom_object(JavaObject value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.CUSTOM_OBJECT;
+ value_ = value;
+ }
+
+ public byte[] get_custom_serialized() {
+ set_custom_serialized(org.apache.thrift.TBaseHelper.rightSize(buffer_for_custom_serialized()));
+ ByteBuffer b = buffer_for_custom_serialized();
+ return b == null ? null : b.array();
+ }
+
+ public ByteBuffer buffer_for_custom_serialized() {
+ if (getSetField() == _Fields.CUSTOM_SERIALIZED) {
+ return org.apache.thrift.TBaseHelper.copyBinary((ByteBuffer)getFieldValue());
+ } else {
+ throw new RuntimeException("Cannot get field 'custom_serialized' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_custom_serialized(byte[] value) {
+ set_custom_serialized(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+ }
+
+ public void set_custom_serialized(ByteBuffer value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.CUSTOM_SERIALIZED;
+ value_ = value;
+ }
+
+ public NullStruct get_local_or_shuffle() {
+ if (getSetField() == _Fields.LOCAL_OR_SHUFFLE) {
+ return (NullStruct)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'local_or_shuffle' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_local_or_shuffle(NullStruct value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.LOCAL_OR_SHUFFLE;
+ value_ = value;
+ }
+
+ public NullStruct get_localFirst() {
+ if (getSetField() == _Fields.LOCAL_FIRST) {
+ return (NullStruct)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'localFirst' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_localFirst(NullStruct value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.LOCAL_FIRST;
+ value_ = value;
+ }
+
+ public boolean is_set_fields() {
+ return setField_ == _Fields.FIELDS;
+ }
+
+
+ public boolean is_set_shuffle() {
+ return setField_ == _Fields.SHUFFLE;
+ }
+
+
+ public boolean is_set_all() {
+ return setField_ == _Fields.ALL;
+ }
+
+
+ public boolean is_set_none() {
+ return setField_ == _Fields.NONE;
+ }
+
+
+ public boolean is_set_direct() {
+ return setField_ == _Fields.DIRECT;
+ }
+
+
+ public boolean is_set_custom_object() {
+ return setField_ == _Fields.CUSTOM_OBJECT;
+ }
+
+
+ public boolean is_set_custom_serialized() {
+ return setField_ == _Fields.CUSTOM_SERIALIZED;
+ }
+
+
+ public boolean is_set_local_or_shuffle() {
+ return setField_ == _Fields.LOCAL_OR_SHUFFLE;
+ }
+
+
+ public boolean is_set_localFirst() {
+ return setField_ == _Fields.LOCAL_FIRST;
+ }
+
+
+ public boolean equals(Object other) {
+ if (other instanceof Grouping) {
+ return equals((Grouping)other);
+ } else {
+ return false;
+ }
+ }
+
+ public boolean equals(Grouping other) {
+ return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
+ }
+
+ @Override
+ public int compareTo(Grouping other) {
+ int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
+ if (lastComparison == 0) {
+ return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
+ }
+ return lastComparison;
+ }
+
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+ list.add(this.getClass().getName());
+ org.apache.thrift.TFieldIdEnum setField = getSetField();
+ if (setField != null) {
+ list.add(setField.getThriftFieldId());
+ Object value = getFieldValue();
+ if (value instanceof org.apache.thrift.TEnum) {
+ list.add(((org.apache.thrift.TEnum)getFieldValue()).getValue());
+ } else {
+ list.add(value);
+ }
+ }
+ return list.hashCode();
+ }
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+
+}
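For reference, a minimal usage sketch of the generated Grouping union above (illustrative only, not part of this commit; it uses only the factory methods and accessors shown in the class):

    // Hypothetical caller code; Grouping and NullStruct are the generated types above.
    Grouping byWord = Grouping.fields(java.util.Arrays.asList("word")); // fields grouping on "word"
    Grouping random = Grouping.shuffle(new NullStruct());               // shuffle grouping
    if (byWord.is_set_fields()) {
        java.util.List<String> keys = byWord.get_fields();              // ["word"]
    }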
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/InvalidTopologyException.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/InvalidTopologyException.java b/jstorm-core/src/main/java/backtype/storm/generated/InvalidTopologyException.java
new file mode 100644
index 0000000..3d5424a
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/InvalidTopologyException.java
@@ -0,0 +1,389 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class InvalidTopologyException extends TException implements org.apache.thrift.TBase<InvalidTopologyException, InvalidTopologyException._Fields>, java.io.Serializable, Cloneable, Comparable<InvalidTopologyException> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("InvalidTopologyException");
+
+ private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new InvalidTopologyExceptionStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new InvalidTopologyExceptionTupleSchemeFactory());
+ }
+
+ private String msg; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ MSG((short)1, "msg");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // MSG
+ return MSG;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(InvalidTopologyException.class, metaDataMap);
+ }
+
+ public InvalidTopologyException() {
+ }
+
+ public InvalidTopologyException(
+ String msg)
+ {
+ this();
+ this.msg = msg;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public InvalidTopologyException(InvalidTopologyException other) {
+ if (other.is_set_msg()) {
+ this.msg = other.msg;
+ }
+ }
+
+ public InvalidTopologyException deepCopy() {
+ return new InvalidTopologyException(this);
+ }
+
+ @Override
+ public void clear() {
+ this.msg = null;
+ }
+
+ public String get_msg() {
+ return this.msg;
+ }
+
+ public void set_msg(String msg) {
+ this.msg = msg;
+ }
+
+ public void unset_msg() {
+ this.msg = null;
+ }
+
+ /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+ public boolean is_set_msg() {
+ return this.msg != null;
+ }
+
+ public void set_msg_isSet(boolean value) {
+ if (!value) {
+ this.msg = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case MSG:
+ if (value == null) {
+ unset_msg();
+ } else {
+ set_msg((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case MSG:
+ return get_msg();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case MSG:
+ return is_set_msg();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof InvalidTopologyException)
+ return this.equals((InvalidTopologyException)that);
+ return false;
+ }
+
+ public boolean equals(InvalidTopologyException that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_msg = true && this.is_set_msg();
+ boolean that_present_msg = true && that.is_set_msg();
+ if (this_present_msg || that_present_msg) {
+ if (!(this_present_msg && that_present_msg))
+ return false;
+ if (!this.msg.equals(that.msg))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_msg = true && (is_set_msg());
+ list.add(present_msg);
+ if (present_msg)
+ list.add(msg);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(InvalidTopologyException other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_msg()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("InvalidTopologyException(");
+ boolean first = true;
+
+ sb.append("msg:");
+ if (this.msg == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.msg);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_msg()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class InvalidTopologyExceptionStandardSchemeFactory implements SchemeFactory {
+ public InvalidTopologyExceptionStandardScheme getScheme() {
+ return new InvalidTopologyExceptionStandardScheme();
+ }
+ }
+
+ private static class InvalidTopologyExceptionStandardScheme extends StandardScheme<InvalidTopologyException> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, InvalidTopologyException struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // MSG
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, InvalidTopologyException struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.msg != null) {
+ oprot.writeFieldBegin(MSG_FIELD_DESC);
+ oprot.writeString(struct.msg);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class InvalidTopologyExceptionTupleSchemeFactory implements SchemeFactory {
+ public InvalidTopologyExceptionTupleScheme getScheme() {
+ return new InvalidTopologyExceptionTupleScheme();
+ }
+ }
+
+ private static class InvalidTopologyExceptionTupleScheme extends TupleScheme<InvalidTopologyException> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, InvalidTopologyException struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.msg);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, InvalidTopologyException struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ }
+ }
+
+}
+
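For reference, a minimal sketch of how the generated InvalidTopologyException above behaves (illustrative only, not part of this commit; it relies only on the constructor, get_msg() and validate() shown in the class):

    // Hypothetical caller code exercising the generated exception above.
    try {
        InvalidTopologyException ok = new InvalidTopologyException("bolt 'count' subscribes to a missing stream");
        ok.validate();                             // passes: required field 'msg' is set
        System.out.println(ok.get_msg());
        new InvalidTopologyException().validate(); // throws TProtocolException: 'msg' is unset
    } catch (org.apache.thrift.TException te) {
        te.printStackTrace();                      // validate() declares throws TException
    }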
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/TestReachTopology.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/TestReachTopology.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/TestReachTopology.java
old mode 100644
new mode 100755
index b1e5578..f4c11b3
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/TestReachTopology.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/TestReachTopology.java
@@ -1,9 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.drpc;
-import org.apache.thrift7.TException;
+import java.util.Map;
-import backtype.storm.generated.DRPCExecutionException;
import backtype.storm.utils.DRPCClient;
+import backtype.storm.utils.Utils;
public class TestReachTopology {
@@ -13,13 +30,14 @@ public class TestReachTopology {
* @throws DRPCExecutionException
* @throws TException
*/
- public static void main(String[] args) throws TException, DRPCExecutionException {
+ public static void main(String[] args) throws Exception {
if (args.length < 1) {
throw new IllegalArgumentException("Invalid parameter");
}
+ Map conf = Utils.readStormConfig();
//"foo.com/blog/1" "engineering.twitter.com/blog/5"
- DRPCClient client = new DRPCClient(args[0], 4772);
+ DRPCClient client = new DRPCClient(conf, args[0], 4772);
String result = client.execute(ReachTopology.TOPOLOGY_NAME, "tech.backtype.com/blog/123");
System.out.println("\n!!! Drpc result:" + result);
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopology.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopology.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopology.java
old mode 100644
new mode 100755
index 7a828e5..93f4594
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopology.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopology.java
@@ -1,15 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
-import java.util.Properties;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.yaml.snakeyaml.Yaml;
+
+import com.alibaba.jstorm.utils.JStormUtils;
+import com.alibaba.jstorm.utils.LoadConf;
+import com.alipay.dw.jstorm.example.sequence.bean.Pair;
+import com.alipay.dw.jstorm.example.sequence.bean.TradeCustomer;
+import com.alipay.dw.jstorm.example.sequence.bolt.MergeRecord;
+import com.alipay.dw.jstorm.example.sequence.bolt.PairCount;
+import com.alipay.dw.jstorm.example.sequence.bolt.SplitRecord;
+import com.alipay.dw.jstorm.example.sequence.bolt.TotalCount;
+import com.alipay.dw.jstorm.example.sequence.spout.SequenceSpout;
import backtype.storm.Config;
import backtype.storm.LocalCluster;
@@ -21,15 +43,6 @@ import backtype.storm.topology.BoltDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alipay.dw.jstorm.example.sequence.bean.Pair;
-import com.alipay.dw.jstorm.example.sequence.bean.TradeCustomer;
-import com.alipay.dw.jstorm.example.sequence.bolt.MergeRecord;
-import com.alipay.dw.jstorm.example.sequence.bolt.PairCount;
-import com.alipay.dw.jstorm.example.sequence.bolt.SplitRecord;
-import com.alipay.dw.jstorm.example.sequence.bolt.TotalCount;
-import com.alipay.dw.jstorm.example.sequence.spout.SequenceSpout;
-
public class SequenceTopology {
private static Logger LOG = LoggerFactory.getLogger(SequenceTopology.class);
@@ -57,7 +70,7 @@ public class SequenceTopology {
// localFirstGrouping is only for jstorm
// boltDeclarer.localFirstGrouping(SequenceTopologyDef.SEQUENCE_SPOUT_NAME);
boltDeclarer
- .localOrShuffleGrouping(SequenceTopologyDef.SEQUENCE_SPOUT_NAME)
+ .shuffleGrouping(SequenceTopologyDef.SEQUENCE_SPOUT_NAME)
.addConfiguration(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 3);
} else {
@@ -173,51 +186,13 @@ public class SequenceTopology {
private static Map conf = new HashMap<Object, Object>();
- public static void LoadProperty(String prop) {
- Properties properties = new Properties();
-
- try {
- InputStream stream = new FileInputStream(prop);
- properties.load(stream);
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + prop);
- } catch (Exception e1) {
- e1.printStackTrace();
-
- return;
- }
-
- conf.putAll(properties);
- }
-
- public static void LoadYaml(String confPath) {
-
- Yaml yaml = new Yaml();
-
- try {
- InputStream stream = new FileInputStream(confPath);
-
- conf = (Map) yaml.load(stream);
- if (conf == null || conf.isEmpty() == true) {
- throw new RuntimeException("Failed to read config file");
- }
-
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + confPath);
- throw new RuntimeException("No config file");
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException("Failed to read config file");
- }
-
- return;
- }
+
public static void LoadConf(String arg) {
if (arg.endsWith("yaml")) {
- LoadYaml(arg);
+ conf = LoadConf.LoadYaml(arg);
} else {
- LoadProperty(arg);
+ conf = LoadConf.LoadProperty(arg);
}
}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyDef.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyDef.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyDef.java
old mode 100644
new mode 100755
index ac2f67c..6e39806
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyDef.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyDef.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyTool.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyTool.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyTool.java
old mode 100644
new mode 100755
index b0da79b..2ae73c2
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyTool.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyTool.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence;
import backtype.storm.Config;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyUserDefine.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyUserDefine.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyUserDefine.java
old mode 100644
new mode 100755
index b869d60..6334a9a
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyUserDefine.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/SequenceTopologyUserDefine.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence;
import java.util.ArrayList;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/Pair.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/Pair.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/Pair.java
old mode 100644
new mode 100755
index ae79c90..d454058
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/Pair.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/Pair.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bean;
import java.io.Serializable;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/PairMaker.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/PairMaker.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/PairMaker.java
old mode 100644
new mode 100755
index b533f3a..95acd5e
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/PairMaker.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/PairMaker.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bean;
import java.util.Random;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/TradeCustomer.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/TradeCustomer.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/TradeCustomer.java
old mode 100644
new mode 100755
index b23a941..65ee75b
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/TradeCustomer.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bean/TradeCustomer.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bean;
import java.io.Serializable;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/MergeRecord.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/MergeRecord.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/MergeRecord.java
old mode 100644
new mode 100755
index de14fbd..28640fa
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/MergeRecord.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/MergeRecord.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bolt;
import java.util.HashMap;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/PairCount.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/PairCount.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/PairCount.java
old mode 100644
new mode 100755
index c30147f..309fc98
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/PairCount.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/PairCount.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bolt;
import java.util.Map;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/SplitRecord.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/SplitRecord.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/SplitRecord.java
old mode 100644
new mode 100755
index 7107ccd..b20d4d3
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/SplitRecord.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/SplitRecord.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bolt;
import java.util.Map;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/TotalCount.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/TotalCount.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/TotalCount.java
old mode 100644
new mode 100755
index a01c3ed..20e5456
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/TotalCount.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/bolt/TotalCount.java
@@ -1,13 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.bolt;
import java.util.Map;
-import java.util.Random;
import java.util.concurrent.atomic.AtomicLong;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import backtype.storm.Constants;
import backtype.storm.task.OutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.IRichBolt;
@@ -15,17 +30,9 @@ import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Tuple;
import backtype.storm.utils.TupleHelpers;
-import com.alibaba.jstorm.client.metric.MetricCallback;
-import com.alibaba.jstorm.client.metric.MetricClient;
-import com.alibaba.jstorm.metric.JStormHistogram;
-import com.alibaba.jstorm.metric.JStormTimer;
import com.alibaba.jstorm.utils.JStormUtils;
import com.alipay.dw.jstorm.example.TpsCounter;
import com.alipay.dw.jstorm.example.sequence.bean.TradeCustomer;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Meter;
-import com.codahale.metrics.Metric;
public class TotalCount implements IRichBolt {
public static Logger LOG = LoggerFactory.getLogger(TotalCount.class);
@@ -37,13 +44,6 @@ public class TotalCount implements IRichBolt {
private boolean checkTupleId = false;
private boolean slowDonw = false;
- private MetricClient metricClient;
- private Gauge<Integer> myGauge;
- private JStormTimer myTimer;
- private Counter myCounter;
- private Meter myMeter;
- private JStormHistogram myJStormHistogram;
- private MetricCallback myCallback;
@Override
@@ -59,35 +59,6 @@ public class TotalCount implements IRichBolt {
slowDonw = JStormUtils.parseBoolean(stormConf.get("bolt.slow.down"), false);
- metricClient = new MetricClient(context);
- myCallback = new MetricCallback<Metric>() {
-
- @Override
- public void callback(Metric metric) {
- LOG.info("Callback " + metric.getClass().getName() + ":" + metric);
- }
- };
-
-
- myGauge = new Gauge<Integer>() {
- private Random random = new Random();
-
- @Override
- public Integer getValue() {
-
- return random.nextInt(100);
- }
-
- };
- myGauge = (Gauge<Integer>) metricClient.registerGauge("name1", myGauge, myCallback);
-
- myTimer = metricClient.registerTimer("name2", myCallback);
-
- myCounter = metricClient.registerCounter("name3", myCallback);
-
- myMeter = metricClient.registerMeter("name4", myCallback);
-
- myJStormHistogram = metricClient.registerHistogram("name5", myCallback);
@@ -105,11 +76,9 @@ public class TotalCount implements IRichBolt {
return ;
}
long before = System.currentTimeMillis();
- myTimer.start();
+
try {
//LOG.info(input.toString());
- myCounter.inc();
- myMeter.mark();
if (checkTupleId) {
Long tupleId = input.getLong(0);
@@ -138,10 +107,9 @@ public class TotalCount implements IRichBolt {
JStormUtils.sleepMs(20);
}
}finally {
- myTimer.stop();
+
}
- long after = System.currentTimeMillis();
- myJStormHistogram.update(after - before);
+
}
public void cleanup() {
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/spout/SequenceSpout.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/spout/SequenceSpout.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/spout/SequenceSpout.java
old mode 100644
new mode 100755
index abdd69f..560dbd8
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/spout/SequenceSpout.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/sequence/spout/SequenceSpout.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.sequence.spout;
import java.util.Map;
@@ -14,13 +31,10 @@ import backtype.storm.topology.IRichSpout;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
import com.alibaba.jstorm.client.ConfigExtension;
-import com.alibaba.jstorm.utils.JStormServerUtils;
import com.alibaba.jstorm.utils.JStormUtils;
import com.alipay.dw.jstorm.example.TpsCounter;
-import com.alipay.dw.jstorm.example.sequence.SequenceTopologyDef;
import com.alipay.dw.jstorm.example.sequence.bean.Pair;
import com.alipay.dw.jstorm.example.sequence.bean.PairMaker;
import com.alipay.dw.jstorm.example.sequence.bean.TradeCustomer;
@@ -169,6 +183,7 @@ public class SequenceSpout implements IRichSpout {
if (isFinished == true) {
LOG.info("Finish sending ");
+ JStormUtils.sleepMs(10000);
return;
}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/transcation/TransactionalGlobalCount.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/transcation/TransactionalGlobalCount.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/transcation/TransactionalGlobalCount.java
old mode 100644
new mode 100755
index 490aba0..88ed0e7
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/transcation/TransactionalGlobalCount.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/transcation/TransactionalGlobalCount.java
@@ -1,165 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.transcation;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alipay.dw.jstorm.example.TpsCounter;
-
-import backtype.storm.Config;
-import backtype.storm.StormSubmitter;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.testing.MemoryTransactionalSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBatchBolt;
-import backtype.storm.topology.base.BaseTransactionalBolt;
-import backtype.storm.transactional.ICommitter;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.transactional.TransactionalTopologyBuilder;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-public class TransactionalGlobalCount {
- public static Logger LOG = LoggerFactory.getLogger(TransactionalGlobalCount.class);
-
- public static final int PARTITION_TAKE_PER_BATCH = 3;
- public static final Map<Integer, List<List<Object>>> DATA = new HashMap<Integer, List<List<Object>>>() {{
- put(0, new ArrayList<List<Object>>() {{
- add(new Values("cat"));
- add(new Values("dog"));
- add(new Values("chicken"));
- add(new Values("cat"));
- add(new Values("dog"));
- add(new Values("apple"));
- }});
- put(1, new ArrayList<List<Object>>() {{
- add(new Values("cat"));
- add(new Values("dog"));
- add(new Values("apple"));
- add(new Values("banana"));
- }});
- put(2, new ArrayList<List<Object>>() {{
- add(new Values("cat"));
- add(new Values("cat"));
- add(new Values("cat"));
- add(new Values("cat"));
- add(new Values("cat"));
- add(new Values("dog"));
- add(new Values("dog"));
- add(new Values("dog"));
- add(new Values("dog"));
- }});
- }};
-
- public static class Value {
- int count = 0;
- BigInteger txid;
- }
-
- public static Map<String, Value> DATABASE = new HashMap<String, Value>();
- public static final String GLOBAL_COUNT_KEY = "GLOBAL-COUNT";
-
- public static class BatchCount extends BaseBatchBolt {
- Object _id;
- BatchOutputCollector _collector;
-
- int _count = 0;
-
- @Override
- public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, Object id) {
- _collector = collector;
- _id = id;
- }
-
- @Override
- public void execute(Tuple tuple) {
- _count++;
- LOG.info("---BatchCount.execute(), _count=" + _count);
- }
-
- @Override
- public void finishBatch() {
- _collector.emit(new Values(_id, _count));
- LOG.info("---BatchCount.finishBatch(), _id=" + _id + ", _count=" + _count);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("id", "count"));
- }
- }
-
- public static class UpdateGlobalCount extends BaseTransactionalBolt implements ICommitter {
- TransactionAttempt _attempt;
- BatchOutputCollector _collector;
-
- int _sum = 0;
-
- @Override
- public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, TransactionAttempt attempt) {
- _collector = collector;
- _attempt = attempt;
- }
-
- @Override
- public void execute(Tuple tuple) {
- _sum += tuple.getInteger(1);
- LOG.info("---UpdateGlobalCount.execute(), _sum=" + _sum);
- }
-
- @Override
- public void finishBatch() {
- Value val = DATABASE.get(GLOBAL_COUNT_KEY);
- Value newval;
- if (val == null || !val.txid.equals(_attempt.getTransactionId())) {
- newval = new Value();
- newval.txid = _attempt.getTransactionId();
- if (val == null) {
- newval.count = _sum;
- }
- else {
- newval.count = _sum + val.count;
- }
- DATABASE.put(GLOBAL_COUNT_KEY, newval);
- }
- else {
- newval = val;
- }
- _collector.emit(new Values(_attempt, newval.count));
- LOG.info("---UpdateGlobalCount.finishBatch(), _attempt=" + _attempt
- + ", newval=(" + newval.txid + "," + newval.count + ")");
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("id", "sum"));
- }
- }
-
- public static void main(String[] args) throws Exception {
- MemoryTransactionalSpout spout = new MemoryTransactionalSpout(DATA, new Fields("word"), PARTITION_TAKE_PER_BATCH);
- TransactionalTopologyBuilder builder = new TransactionalTopologyBuilder("global-count", "spout", spout, 2);
- builder.setBolt("partial-count", new BatchCount(), 3).noneGrouping("spout");
- builder.setBolt("sum", new UpdateGlobalCount(), 1).globalGrouping("partial-count");
-
-// LocalCluster cluster = new LocalCluster();
-
- Config config = new Config();
- config.setDebug(true);
- config.setMaxSpoutPending(3);
- config.put(Config.TOPOLOGY_WORKERS, 9);
- Config.setNumAckers(config, 0);
-
- StormSubmitter.submitTopology("global-count-topology", config, builder.buildTopology());
-
-// Thread.sleep(3000);
-// cluster.shutdown();
- }
+import java.math.BigInteger;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.jstorm.utils.LoadConf;
+
+import backtype.storm.Config;
+import backtype.storm.LocalCluster;
+import backtype.storm.StormSubmitter;
+import backtype.storm.coordination.BatchOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.testing.MemoryTransactionalSpout;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseBatchBolt;
+import backtype.storm.topology.base.BaseTransactionalBolt;
+import backtype.storm.transactional.ICommitter;
+import backtype.storm.transactional.TransactionAttempt;
+import backtype.storm.transactional.TransactionalTopologyBuilder;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+
+
+/**
+ * This is a basic example of a transactional topology. It keeps a count of the number of tuples seen so far in a
+ * database. The source of data and the databases are mocked out as in memory maps for demonstration purposes. This
+ * class is defined in depth on the wiki at https://github.com/nathanmarz/storm/wiki/Transactional-topologies
+ */
+public class TransactionalGlobalCount {
+ private static final Logger LOG = LoggerFactory.getLogger(TransactionalGlobalCount.class);
+
+ public static final int PARTITION_TAKE_PER_BATCH = 3;
+ public static final Map<Integer, List<List<Object>>> DATA = new HashMap<Integer, List<List<Object>>>() {{
+ put(0, new ArrayList<List<Object>>() {{
+ add(new Values("cat"));
+ add(new Values("dog"));
+ add(new Values("chicken"));
+ add(new Values("cat"));
+ add(new Values("dog"));
+ add(new Values("apple"));
+ }});
+ put(1, new ArrayList<List<Object>>() {{
+ add(new Values("cat"));
+ add(new Values("dog"));
+ add(new Values("apple"));
+ add(new Values("banana"));
+ }});
+ put(2, new ArrayList<List<Object>>() {{
+ add(new Values("cat"));
+ add(new Values("cat"));
+ add(new Values("cat"));
+ add(new Values("cat"));
+ add(new Values("cat"));
+ add(new Values("dog"));
+ add(new Values("dog"));
+ add(new Values("dog"));
+ add(new Values("dog"));
+ }});
+ }};
+
+ public static class Value {
+ int count = 0;
+ BigInteger txid;
+ }
+
+ public static Map<String, Value> DATABASE = new HashMap<String, Value>();
+ public static final String GLOBAL_COUNT_KEY = "GLOBAL-COUNT";
+
+ public static class BatchCount extends BaseBatchBolt {
+ Object _id;
+ BatchOutputCollector _collector;
+
+ int _count = 0;
+
+ @Override
+ public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, Object id) {
+ _collector = collector;
+ _id = id;
+ }
+
+ @Override
+ public void execute(Tuple tuple) {
+ _count++;
+ }
+
+ @Override
+ public void finishBatch() {
+ _collector.emit(new Values(_id, _count));
+ LOG.info("@@@@@@@@@@@@@@ BatchCount finishBatch @@@@@@@@@@@@@@@");
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("id", "count"));
+ }
+ }
+
+ public static class UpdateGlobalCount extends BaseTransactionalBolt implements ICommitter {
+ TransactionAttempt _attempt;
+ BatchOutputCollector _collector;
+
+ int _sum = 0;
+
+ @Override
+ public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector, TransactionAttempt attempt) {
+ _collector = collector;
+ _attempt = attempt;
+ }
+
+ @Override
+ public void execute(Tuple tuple) {
+ _sum += tuple.getInteger(1);
+ }
+
+ @Override
+ public void finishBatch() {
+ Value val = DATABASE.get(GLOBAL_COUNT_KEY);
+ Value newval;
+ if (val == null || !val.txid.equals(_attempt.getTransactionId())) {
+ newval = new Value();
+ newval.txid = _attempt.getTransactionId();
+ if (val == null) {
+ newval.count = _sum;
+ }
+ else {
+ newval.count = _sum + val.count;
+ }
+ DATABASE.put(GLOBAL_COUNT_KEY, newval);
+ }
+ else {
+ newval = val;
+ }
+ _collector.emit(new Values(_attempt, newval.count));
+ LOG.info("@@@@@@@@@@@@@@ UpdateGlobalCount finishBatch @@@@@@@@@@@@@@@");
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declare(new Fields("id", "sum"));
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ MemoryTransactionalSpout spout = new MemoryTransactionalSpout(DATA, new Fields("word"), PARTITION_TAKE_PER_BATCH);
+ TransactionalTopologyBuilder builder = new TransactionalTopologyBuilder("global-count", "spout", spout, 3);
+ builder.setBolt("partial-count", new BatchCount(), 5).noneGrouping("spout");
+ builder.setBolt("sum", new UpdateGlobalCount()).globalGrouping("partial-count");
+
+ Config config = new Config();
+ config.setDebug(true);
+ config.setMaxSpoutPending(3);
+
+ if (args.length == 0){
+ LocalCluster cluster = new LocalCluster();
+
+ cluster.submitTopology("global-count-topology", config, builder.buildTopology());
+
+ Thread.sleep(100000);
+ cluster.shutdown();
+ }else {
+
+ config.setNumWorkers(3);
+ try {
+ Map yamlConf = LoadConf.LoadYaml(args[0]);
+ if (yamlConf != null) {
+ config.putAll(yamlConf);
+ }
+ }catch (Exception e) {
+ System.out.println("Input " + args[0] + " isn't one yaml ");
+ }
+
+ StormSubmitter.submitTopology("global", config, builder.buildTopology());
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/start.sh
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/start.sh b/example/sequence-split-merge/start.sh
old mode 100644
new mode 100755
index 0a050d5..dc634f2
--- a/example/sequence-split-merge/start.sh
+++ b/example/sequence-split-merge/start.sh
@@ -1,4 +1,6 @@
#!/bin/bash
-jstorm jar target/sequence-split-merge-1.0.8-jar-with-dependencies.jar com.alipay.dw.jstorm.example.sequence.SequenceTopology conf/conf.prop
+#jstorm jar target/sequence-split-merge-1.1.0-jar-with-dependencies.jar com.alipay.dw.jstorm.transcation.TransactionalGlobalCount global
+jstorm jar target/sequence-split-merge-1.1.0-jar-with-dependencies.jar com.alipay.dw.jstorm.example.sequence.SequenceTopology conf/conf.yaml
+#jstorm jar target/sequence-split-merge-1.0.8-jar-with-dependencies.jar com.alipay.dw.jstorm.example.sequence.SequenceTopology conf/conf.prop
#jstorm jar target/sequence-split-merge-1.0.8-jar-with-dependencies.jar com.alipay.dw.jstorm.example.batch.SimpleBatchTopology conf/topology.yaml
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/history.md
----------------------------------------------------------------------
diff --git a/history.md b/history.md
old mode 100644
new mode 100755
index 9de0fde..efc2056
--- a/history.md
+++ b/history.md
@@ -1,397 +1,473 @@
-[JStorm English introduction](http://42.121.19.155/jstorm/JStorm-introduce-en.pptx)
-[JStorm Chinese introduction](http://42.121.19.155/jstorm/JStorm-introduce.pptx)
-
-#Release 0.9.6.3
-## New features
-1. Implement tick tuple
-2. Support logback
-3. Support to load the user defined configuration file of log4j
-4. Enable the display of user defined metrics in web UI
-5. Add "topologyName" parameter for "jstorm list" command
-6. Support the use of ip and hostname at the same for user defined schedule
-7. Support junit test for local mode
-8. Enable client command(e.g. jstorm jar) to load self-defined storm.yaml
-## Bug fix
-1. Add activate and deactivate api of spout, which are used in nextTuple prepare phase
-2. Update the support of multi language
-3. Check the worker's heartbeat asynchronously to speed up the lunch of worker
-4. Add the check of worker's pid to speed up the detect of dead worker
-5. Fix the high cpu load of disruptor producer when disruptor queue is full
-6. Remove the confused exception reported by disruptor queue when killing worker
-7. Fix the failure problem of "jstorm restart" client command
-8. Report error when user submits the jar built on a incompatible jstorm release
-9. Fix the problem that one log will printed twice when user define a configuration of log4j or logback on local mode
-10. Fix the potential exception when killing topology on local mode
-11. Forbid user to change the log level of jstorm log
-12. Add a configuration template of logback
-13. Fix the problem that process the upload of lib jar as application jar
-14. Makesure the clean of ZK node for a topology which is removed
-15. Add the information of topology name when java core dump
-16. Fix the incorrect value of -XX:MaxTenuringThreshold. Currently, the default value of jstorm is 20, but the max value in JDK8 is 15.
-17. Fix the potential reading failure of cpu core number, which may cause the supervisor slot to be set to 0
-18. Fix the "Address family not supported by protocol family" error on local mode
-19. Do not start logview http server on local mode
-20. Add the creation of log dir in supervisor alive checking scription
-21. Check the correctness of ip specified in configuration file before starting nimbus
-22. Check the correctness of env variable $JAVA_HOME/$JSTORM_HOME/$JSTORM_CONF_DIR before starting jstorm service
-23. Specify the log dir for rpm installation
-24. Add reading permission of /home/admin/jstorm and /home/admin/logs for all users after rpm installation
-25. Config local temporay ports when rpm installation
-26. Add noarch rpm package
-
-#Release 0.9.6.2
-1. Add option to switch between BlockingQueue and Disruptor
-2. Fix the bug which under sync netty mode, client failed to send message to server
-3. Fix the bug let web UI can dispaly 0.9.6.1 cluster
-4. Fix the bug topology can be submited without main jar but a lot of little jar
-5. Fix the bug restart command
-6. Fix the bug trident bug
-7. Add the validation of topology name, component name... Only A-Z, a-z, 0-9, '_', '-', '.' are valid now.
-8. Fix the bug close thrift client
-
-#Release 0.9.6.2-rc
-1. Improve user experience from Web UI
-1.1 Add jstack link
-1.2 Add worker log link in supervisor page
-1.3 Add Web UI log encode setting "gbk" or "utf-8"
-1.4 Show starting tasks in component page
-1.5 Show dead task's information in UI
-1.6 Fix the bug that error info can not be displayed in UI when task is restarting
-2. Add restart command, with this command, user can reload configuration, reset worker/task parallism
-3. Upgrade curator/disruptor/guava version
-4. Revert json lib to google-simple json, wrap all json operation into two utility method
-5. Add new storm submit api, supporting submit topology under java
-6. Enable launch process with backend method
-7. Set "spout.pending.full.sleep" default value as true
-8. Fix the bug user define sceduler not support a list of workers
-9. Add disruptor/JStormUtils junit test
-10. Enable user to configure the name of monitor name of alimonitor
-11. Add tcp option "reuseAddress" in netty framework
-12. Fix the bug: When spout does not implement the ICommitterTrident interface, MasterCoordinatorSpout will stick on commit phase.
-
-#Release 0.9.6.2-rc
-1. Improve user experience from Web UI
-1.1 Add jstack link
-1.2 Add worker log link in supervisor page
-1.3 Add Web UI log encode setting "gbk" or "utf-8"
-1.4 Show starting tasks in component page
-1.5 Show dead task's information in UI
-1.6 Fix the bug that error info can not be displayed in UI when task is restarting
-2. Add restart command, with this command, user can reload configuration, reset worker/task parallism
-3. Upgrade curator/disruptor/guava version
-4. Revert json lib to google-simple json, wrap all json operation into two utility method
-5. Add new storm submit api, supporting submit topology under java
-6. Enable launch process with backend method
-7. Set "spout.pending.full.sleep" default value as true
-8. Fix the bug user define sceduler not support a list of workers
-9. Add disruptor/JStormUtils junit test
-10. Enable user to configure the name of monitor name of alimonitor
-11. Add tcp option "reuseAddress" in netty framework
-12. Fix the bug: When spout does not implement the ICommitterTrident interface, MasterCoordinatorSpout will stick on commit phase.
-
-#Release 0.9.6.1
-1. Add management of multiclusters to Web UI. Added management tools for multiclusters in WebUI.
-2. Merged Trident API from storm-0.9.3
-3. Replaced gson with fastjson
-4. Refactored metric json generation code.
-5. Stored version info with $JSTORM_HOME/RELEASE.
-6. Replaced SingleThreadDisruptorQueue with MultiThreadDisruptorQueue in task deserialize thread.
-7. Fixed issues with worker count on Web UI.
-8. Fixed issues with accessing the task map with multi-threads.
-9. Fixed NullPointerException while killing worker and reading worker's hearbeat object.
-10. Netty client connect to server only in NettyClient module.
-11. Add break loop operation when netty client connection is closed
-12. Fix the bug that topology warning flag present in cluster page is not consistent with error information present in topology page
-13. Add recovery function when the data of task error information is corrupted
-14. Fix the bug that the metric data can not be uploaded onto Alimonitor when ugrading from pre-0.9.6 to 0.9.6 and executing pkill java without restart the topologying
-15. Fix the bug that zeroMq failed to receive data
-16. Add interface to easily setting worker's memory
-17. Set default value of topology.alimonitor.metrics.post to false
-18. Only start NETTY_SERVER_DECODE_TIME for netty server
-19. Keep compatible with Storm for local mode
-20. Print rootId when tuple failed
-21. In order to keep compatible with Storm, add submitTopologyWithProgressBar interface
-22. Upgrade netty version from 3.2.7 to 3.9.0
-23. Support assign topology to user-defined supervisors
-
-
-#Release 0.9.6
-1. Update UI
- - Display the metrics information of task and worker
- - Add warning flag when errors occur for a topology
- - Add link from supervisor page to task page
-2. Send metrics data to Alimonitor
-3. Add metrics interface for user
-4. Add task.cleanup.timeout.sec setting to let task gently cleanup
-5. Set the worker's log name as topologyName-worker-port.log
-6. Add setting "worker.redirect.output.file", so worker can redirect System.out/System.err to one setting file
-7. Add storm list command
-8. Add closing channel check in netty client to avoid double close
-9. Add connecting check in netty client to avoid connecting one server twice at one time
-
-#Release 0.9.5.1
-1. Add netty sync mode
-2. Add block operation in netty async mode
-3. Replace exception with Throwable in executor layer
-4. Upgrade curator-framework version from 1.15 to 1.3.2
-5. Add more netty junit test
-6. Add log when queue is full
-
-#Release 0.9.5
-##Big feature:
-1. Redesign scheduler arithmetic, basing worker not task .
-
-## Bug fix
-1. Fix disruptor use too much cpu
-2. Add target NettyServer log when f1ail to send data by netty
-
-#Release 0.9.4.1
-##Bug fix:
-1. Improve speed between tasks who is running in one worker
-2. Fix wrong timeout seconds
-3. Add checking port when worker initialize and begin to kill old worker
-4. Move worker hearbeat thread before initializing tasks
-5. Move init netty-server before initializeing tasks
-6. Check whether tuple's rootId is duplicated
-7. Add default value into Utils.getInt
-8. Add result function in ReconnectRunnable
-9. Add operation to start Timetick
-10. Halt process when master nimbus lost ZK node
-11. Add exception catch when cgroups kill process
-12. Speed up reconnect to netty-server
-13. Share one task hearbeat thread for all tasks
-14. Quickly haltprocess when initialization failed.
-15. Check web-ui logview page size
-
-
-
-#Release 0.9.4
-
-## Big features
-1. Add transaction programming mode
-2. Rewrite netty code, 1. use share boss/worker thread pool;2 async send batch tuples;3 single thread to do reconnect job;4 receive batch tuples
-3. Add metrics and statics
-4. Merge Alimama storm branch into this version, submit jar with -conf, -D, -lib
-
-
-## Enhancement
-1. add setting when supervisor has been shutdown, worker will shutdown automatically
-2. add LocalFristGrouping api
-3. enable cgroup for normal user
-
-
-
-##Bug fix:
-1. Setting buffer size when upload jar
-2. Add lock between ZK watch and timer thread when refresh connection
-3. Enable nimbus monitor thread only when topology is running in cluster mode
-4. Fix exception when failed to read old assignment of ZK
-5. classloader fix when both parent and current classloader load the same class
-6. Fix log view null pointer exception
-
-#Release 0.9.3.1
-
-## Enhancement
-1. switch apache thrift7 to storm thrift7
-2. set defatult acker number is 1
-3. add "spout.single.thread" setting
-4. make nimbus logview port different from supervisor's
-5. web ui can list all files of log's subdir
-6. Set gc dump dir as log's dir
-
-
-#Release 0.9.3
-## New feature
-1. Support Aliyun Apsara/Hadoop Yarn
-
-## Enhancement
-1. Redesign Logview
-2. Kill old worker under the same port when worker is starting
-3. Add zk information/version information on UI
-4. Add nodeport information for dead task in nimbus
-5. Add interface to get values when spout doing ack
-6. Add timeout statics in bolt
-7. jstorm script return status
-8. Add logs when fail to deserialize tuple
-9. Skip sleep operation when max_pending is 1 and waiting ack
-10. Remove useless dependency
-11. Longer task timeout setting
-12. Add supervisor.use.ip setting
-13. Redirect supervisor out/err to /dev/null, redirect worker out/err to one file
-
-
-## Bug Fix
-1. Fix kryo fail to deserialize object when enable classloader
-2. Fix fail to reassign dead task when worker number is less than topology apply
-3. Set samller jvm heap memory for jstorm-client
-4. Fix fail to set topology status as active when do rebalance operation twice at one time,
-5. Fix local mode bug under linux
-6. Fix average latency isn't accurate
-7. GC tuning.
-8. Add default kill function for AysncLoopRunnable
-
-
-
-#Release 0.9.2
-## New feature
-1. Support LocalCluster/LocalDrpc mode, support debugging topology under local mode
-2. Support CGroups, assigning CPU in hardware level.
-3. Support simple logview
-
-## Bug fix or enhancement
-1. Change SpoutExecutor's RotatingMap to TimeCacheMap, when putting too much timeout tuple is easy to cause deadlock in spout acker thread
-2. Tunning gc parameter, improve performance and avoid full GC
-3. Improve Topology's own gc priority, make it higher than JStorm system setting.
-4. Tuning Nimbus HA, switch nimbus faster, when occur nimbus failure.
-5. Fix bugs found by FindBugs tool.
-6. Revert Trident interface to 0.8.1, due to 0.8.1's trident interface's performance is better.
-7. Setting nimbus.task.timeout.secs as 60 to avoid nimbus doing assignment when task is under full gc.
-8. Setting default rpc framework as netty
-9. Tunning nimbus shutdown flow
-10. Tunning worker shutdown flow
-11. Add task heartbeat log
-12. Optimize Drpc/LocalDrpc source code.
-13. Move classloader to client jar.
-14 Fix classloader fail to load anonymous class
-15. Web Ui display slave nimbus
-16. Add thrift max read buffer size
-17. Setting CPU slot base double
-18. Move Zk utility to jstorm-client-extension.jar
-19. Fix localOrShuffle null pointer
-20. Redirecting worker's System.out/System.err to file is configurable.
-21. Add new RPC frameworker JeroMq
-22. Fix Zk watcher miss problem
-23. Update sl4j 1.5.6 to 1.7.5
-24. Shutdown worker when occur exception in Smart thread
-25. Skip downloading useless topology in Supervisor
-26. Redownload the topology when failed to deserialize topology in Supervisor.
-27. Fix topology codeDir as resourceDir
-28. Catch error when normalize topology
-29. Add log when found one task is dead
-30. Add maven repository, JStorm is able to build outside of Alibaba
-31. Fix localOrShuffle null pointer exception
-32. Add statics counting for internal tuples in one worker
-33. Add thrift.close after download topology binary in Supervisor
-
-
-# Release 0.9.1
-
-## new features
-1. Application classloader. when Application jar is conflict with jstorm jar,
- please enable application classloader.
-2. Group Quato, Different group with different resource quato.
-
-## Bug fix or enhancement
-1. Fix Rotation Map competition issue.
-2. Set default acker number as 0
-3. Set default spout/bolt number as 1
-4. Add log directory in log4j configuration file
-5. Add transaction example
-6. Fix UI showing wrong worker numbe in topology page
-7. Fix UI showing wrong latency in topology page
-8. Replace hardcode Integer convert with JStormUtils.parseInt
-9. Support string parse in Utils.getInt
-10. Remove useless dependency in pom.xml
-11. Support supervisor using IP or special hostname
-12. Add more details when no resource has been assigned to one new topology
-13. Replace normal thread with Smart thread
-14. Add gc details
-15. Code format
-16. Unify stormId and topologyId as topologyId
-17. Every nimbus will regist ip to ZK
-
-
-
-# Release 0.9.0
-In this version, it will follow storm 0.9.0 interface, so the application running
-on storm 0.9.0 can run in jstorm 0.9.0 without any change.
-
-## Stability
-1. provide nimbus HA. when the master nimbus shuts down, it will select another
- online nimbus to be the master. There is only one master nimbus online
- any time and the slave nimbuses just synchronouse the master's data.
-2. RPC through netty is stable, the sending speed is match with receiving speed.
-
-
-## Powerful scheduler
-1. Assigning resource on four dimensions:cpu, mem, disk, net
-2. Application can use old assignment.
-3. Application can use user-define resource.
-4. Task can apply extra cpu slot or memory slot.
-4. Application can force tasks run on different supervisor or the same supervisor
-
-
-
-
-
-
-
-
-# Release 0.7.1
-In this version, it will follow storm 0.7.1 interface, so the topology running
-in storm 0.7.1 can run in jstorm without any change.
-
-## Stability
-* Assign workers in balance
-* add setting "zmq.max.queue.msg" for zeromq
-* communication between worker and tasks without zeromq
-* Add catch exception operation
- * in supervisor SyncProcess/SyncSupervisor
- * add catch exception and report_error in spout's open and bolt's prepare
- * in all IO operation
- * in all serialize/deserialize
- * in all ZK operation
- * in topology upload/download function
- * during initialization zeromq
-* do assignmen/reassignment operation in one thread to avoid competition
-* redesign nimbus 's topology assign algorithm, make the logic simple much.
-* redesign supervisor's sync assignment algorithm, make the logic simple much
-* reduce zookeeper load
- * redesign nimbus monitor logic, it will just scan tasks' hearbeat, frequency is 10s
- * nimbus cancel watch on supervisor
- * supervisor heartbeat frequence change to 10s
- * supervisor syncSupervisor/syncProcess frequence change to 10s
- * supervisor scan /$(ZKROOT)/assignment only once in one monitor loop
- * task hearbeat change to 10s
-* create task pid file before connection zk, this is very import when zk is unstable.
-
-
-## Performance tuning
-* reduce once memory copy when deserialize tuple, improve performance huge.
-* split executor thread as two thread, one handing receive tuples, one sending tuples, improve performance much
-* redeisign sample code, it will sampling every 5 seconds, not every 20 tuple once, improve performance much
-* simplify the ack's logic, make acker more effeciency
-* Communication between worker and tasks won't use zeromq, just memory share in process
-* in worker's Drainer/virtualportdispatch thread, spout/bolt recv/send thread,
- the thread will sleep 1 ms when there is not tuple in one loop
-* communication between worker and tasks without zeromq
-* sampling frequence change to 5s, not every 20 tuple once.
-
-## Enhancement:
-* add IFailValueSpout interface
-* Redesign sampling code, collection statics model become more common.
- * Add sending/recving tps statics, statics is more precise.
-* Atomatically do deactivate action when kill/rebalance topology, and the wait time is 2 * MSG_TIMEOUT
-* fix nongrouping bug, random.nextInt will generate value less than 0.
-* Sleep one setting time(default is 1 minute) after finish spout open,
- which is used to wait other task finish initialization.
-* Add check component name when submit topology, forbidding the component
- which name start with "__"
-* change the zk's node /$(ZKROOT)/storm to /$(ZKROOT)/topology
-* abstract topology check logic from generating real topology function
-* when supervisor is down and topology do rebalance, the alive task under down
- supervisor is unavailable.
-* add close connection operation after finish download topology binary
-* automatically create all local dirtorie, such as
- /$(LOCALDIR)/supervisor/localstate
-* when killing worker, add "kill and sleep " operation before "kill -9" operation
-* when generate real topology binary,
- * configuration priority different.
- component configuration > topology configuration > system configuration
- * skip the output stream which target component doesn't exist.
- * skip the component whose parallism is 0.
- * component's parallism is less than 0, throw exception.
-* skip ack/fail when inputstream setting is empty
-* add topology name to the log
-* fix ui select option error, default is 10 minutes
+[JStorm English introduction](http://42.121.19.155/jstorm/JStorm-introduce-en.pptx)
+[JStorm Chinese introduction](http://42.121.19.155/jstorm/JStorm-introduce.pptx)
+
+#Release 2.0.4-SNAPSHOT
+## New features
+1. Redesign the Metric/Monitor system: new RollingWindow/Metrics/NettyMetrics; all metric data is sent/received through thrift
+2. Redesign the Web-UI; the new Web-UI code is clear and clean
+3. Add a NimbusCache layer, using RocksDB and TimeCacheWindow
+4. Refactor all ZK structures and ZK operations
+5. Refactor all thrift structures
+6. Merge the jstorm-client/jstorm-client-extension/jstorm-core modules into jstorm-core
+7. Set the dependency versions to be the same as storm
+8. Sync all java code with apache-storm-0.10.0-beta1
+9. Switch the log system to logback
+10. Upgrade thrift to apache thrift 0.9.2
+11. Performance tuning for huge topologies with more than 600 workers or 2000 tasks
+12. Require jdk7 or higher
+
+#Release 0.9.7.1
+## New Features
+1. Batch the tuples whose target task is the same before sending them out (task.batch.tuple=true, task.msg.batch.size=4); see the sketch after this list.
+2. LocalFirst grouping is updated. If all local tasks are busy, tasks on other nodes will be chosen as the target task instead of waiting on a busy local task.
+3. Support reloading the application config while the topology is running.
+4. Support user-defined task heartbeat timeout and task cleanup timeout per topology.
+5. Update the wait strategy of the disruptor queue to the non-blocking mode "TimeoutBlockingWaitStrategy"
+6. Support a user-defined timeout for discarding messages that have been pending in the netty buffer for a long time.
+7. Update the message processing structure. The virtualPortDispatch and drainer threads are removed to cut unnecessary cpu cost and tuple transfers
+8. Add the jstorm parameter "--include-jars" for topology submission; these jars are added to the classpath
+9. Nimbus or Supervisor commits suicide when the local ip is 127.0.0.0
+10. Add a user-defined-scheduler example
+11. Merge Supervisor's syncSupervisor and syncProcess
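
The two batching keys named in item 1 are plain configuration entries. A minimal sketch of how they might be set on a topology's Config before submission (the class name and the surrounding submit step are illustrative only):

    import backtype.storm.Config;

    public class BatchTupleConfigSketch {
        public static void main(String[] args) {
            Config conf = new Config();
            // Keys taken from the release note above: batch tuples that share a target task.
            conf.put("task.batch.tuple", true);
            conf.put("task.msg.batch.size", 4);
            // conf would then be passed to StormSubmitter.submitTopology(...) as usual.
        }
    }
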
+## Bug Fix
+1. Improve the GC settings.
+2. Fix the bug that the task heartbeat might not be updated in time in some scenarios.
+3. Fix the bug that the reconnection operation might get stuck for an unexpected period when the connection to a remote worker is shut down while some messages are still buffered in netty.
+4. Reuse the thrift client when submitting a topology
+5. Avoid repeatedly downloading the binary when a worker fails to start.
+## Changed setting
+1. Change the task heartbeat timeout to 4 minutes
+2. Set the netty client thread pool (clientScheduleService) size to 5
+## Deploy and scripts
+1. Improve cleandisk.sh; avoid deleting the current directory and /tmp/hsperfdata_admin
+2. Add the executable attribute to the scripts under example
+3. Add a parameter to stat.sh, which can be used to choose whether to start the supervisor. This is useful in virtualized environments
+
+#Release 0.9.7
+## New Features
+1. Support dynamic scale-out/scale-in of workers, spouts, bolts or ackers without stopping the topology's service.
+2. When cgroup is enabled, support an upper limit on cpu core usage. The default setting is 3 cpu cores.
+3. Update the task heartbeat mechanism so that the heartbeat correctly tracks the status of the spout/bolt execute thread.
+4. Support adding jstorm prefix info (clusterName, topologyName, ip:port, componentName, taskId, taskIndex) to worker/task logs
+5. Check the supervisor's heartbeat during topology assignment to ensure no worker is assigned to a dead supervisor
+6. Add an api to query a task's/worker's metric info, e.g. load status of the task queue, worker cpu usage, worker mem usage...
+7. Try to re-download jars when starting a worker fails several times, to avoid potential corruption of jars
+8. Add a Nimbus ZK cache to accelerate nimbus reads from zk
+9. Add the thrift api getVersion; it is used to check the client jstorm version against the server jstorm version.
+10. Update the metrics structure sent to Alimonitor
+11. Add an exclude-jar parameter to jstorm.py, which avoids class conflicts when submitting a topology
+## Bug Fix
+1. Fix the supervisor process not responding when a large number of topologies are submitted in a short time
+2. Fix the failure of the later topology when two or more topologies are submitted at the same time.
+3. TickTuple does not need to be acked. Fix the incorrect count of failure messages.
+4. Fix a potential incorrect assignment when use.old.assignment=true
+5. Fix the failure to remove some zk nodes when killing a topology
+6. Fix the failure to restart a topology when nimbus is doing an assignment job.
+7. Fix an NPE when registering metrics
+8. Fix the failure to read the ZK monitor znode through zktool
+9. Fix an exception when the classloader is enabled in local mode
+10. Fix duplicate logs when a user-defined logback is enabled in local mode
+## Changed Setting
+1. Set the Nimbus jvm memory size to 4G
+2. Change the supervisor-to-nimbus heartbeat timeout from 60s to 180s
+3. In order to avoid OOM, set storm.messaging.netty.max.pending to 4
+4. Set the task queue size to 1024 and the worker's total send/receive queue size to 2048
+## Deploy and scripts
+1. Add an rpm build spec
+2. Add the jstorm deploy files for rpm package building
+3. Enable the cleandisk cronjob every hour; reserve coredumps for only one hour.
+
+#Release 0.9.6.3
+## New features
+1. Implement tick tuples (see the sketch after this list)
+2. Support logback
+3. Support loading a user-defined log4j configuration file
+4. Enable the display of user-defined metrics in the web UI
+5. Add a "topologyName" parameter for the "jstorm list" command
+6. Support the use of ip and hostname at the same time for a user-defined schedule
+7. Support junit tests for local mode
+8. Enable client commands (e.g. jstorm jar) to load a self-defined storm.yaml
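
A minimal sketch of how a bolt might consume tick tuples, assuming the TupleHelpers check already used elsewhere in this commit (see TotalCount above) and Storm's standard topology.tick.tuple.freq.secs setting; the bolt class itself is hypothetical:

    import java.util.Map;
    import backtype.storm.Config;
    import backtype.storm.task.OutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichBolt;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.utils.TupleHelpers;

    public class TickAwareBoltSketch extends BaseRichBolt {
        private OutputCollector collector;

        @Override
        public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void execute(Tuple input) {
            if (TupleHelpers.isTickTuple(input)) {
                // Periodic housekeeping, e.g. flushing counters, runs here.
            } else {
                // Normal tuple processing.
                collector.ack(input);
            }
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
        }

        @Override
        public Map<String, Object> getComponentConfiguration() {
            Config conf = new Config();
            // Ask the framework to deliver a tick tuple to this bolt every 10 seconds.
            conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 10);
            return conf;
        }
    }
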
+## Bug fix
+1. Add activate and deactivate apis to spouts, which are used in the nextTuple prepare phase
+2. Update the support for multiple languages
+3. Check the worker's heartbeat asynchronously to speed up the launch of workers
+4. Add a check of the worker's pid to speed up the detection of dead workers
+5. Fix the high cpu load of the disruptor producer when the disruptor queue is full
+6. Remove the confusing exception reported by the disruptor queue when killing a worker
+7. Fix the failure of the "jstorm restart" client command
+8. Report an error when the user submits a jar built on an incompatible jstorm release
+9. Fix the problem that a log line is printed twice when the user defines a log4j or logback configuration in local mode
+10. Fix a potential exception when killing a topology in local mode
+11. Forbid users from changing the log level of the jstorm log
+12. Add a logback configuration template
+13. Fix the problem that the upload of a lib jar was processed as an application jar
+14. Make sure the ZK nodes of a removed topology are cleaned up
+15. Add the topology name to the information printed when java core dumps
+16. Fix the incorrect value of -XX:MaxTenuringThreshold. Currently, the default value in jstorm is 20, but the max value in JDK8 is 15.
+17. Fix a potential failure when reading the cpu core number, which may cause the supervisor slots to be set to 0
+18. Fix the "Address family not supported by protocol family" error in local mode
+19. Do not start the logview http server in local mode
+20. Add the creation of the log dir in the supervisor alive checking script
+21. Check the correctness of the ip specified in the configuration file before starting nimbus
+22. Check the correctness of the env variables $JAVA_HOME/$JSTORM_HOME/$JSTORM_CONF_DIR before starting the jstorm service
+23. Specify the log dir for rpm installation
+24. Add read permission on /home/admin/jstorm and /home/admin/logs for all users after rpm installation
+25. Configure local temporary ports during rpm installation
+26. Add a noarch rpm package
+
+#Release 0.9.6.2
+1. Add an option to switch between BlockingQueue and Disruptor
+2. Fix the bug that, under sync netty mode, the client failed to send messages to the server
+3. Fix the bug so that the web UI can display a 0.9.6.1 cluster
+4. Fix the bug that a topology could be submitted without a main jar but with a lot of little jars
+5. Fix a bug in the restart command
+6. Fix a trident bug
+7. Add validation of topology names, component names, etc. Only A-Z, a-z, 0-9, '_', '-', '.' are valid now (see the sketch after this list).
+8. Fix a bug in closing the thrift client
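
Item 7 above spells out the allowed character set; a hypothetical helper showing one way such a check could be expressed (the real validation lives inside JStorm's submit path, this is only an illustration of the rule):

    import java.util.regex.Pattern;

    public class NameValidationSketch {
        // Allowed characters per the note above: A-Z, a-z, 0-9, '_', '-', '.'
        private static final Pattern VALID_NAME = Pattern.compile("^[A-Za-z0-9_.\\-]+$");

        public static boolean isValidName(String name) {
            return name != null && !name.isEmpty() && VALID_NAME.matcher(name).matches();
        }

        public static void main(String[] args) {
            System.out.println(isValidName("word-count_v1.2")); // true
            System.out.println(isValidName("bad name!"));       // false
        }
    }
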
+
+#Release 0.9.6.2-rc
+1. Improve the user experience of the Web UI
+1.1 Add a jstack link
+1.2 Add a worker log link in the supervisor page
+1.3 Add a Web UI log encoding setting, "gbk" or "utf-8"
+1.4 Show starting tasks in the component page
+1.5 Show dead tasks' information in the UI
+1.6 Fix the bug that error info cannot be displayed in the UI when a task is restarting
+2. Add a restart command; with this command the user can reload the configuration and reset worker/task parallelism
+3. Upgrade the curator/disruptor/guava versions
+4. Revert the json lib to google-simple json, wrapping all json operations in two utility methods
+5. Add a new storm submit api, supporting topology submission from java
+6. Enable launching processes with a background method
+7. Set the "spout.pending.full.sleep" default value to true
+8. Fix the bug that a user-defined scheduler does not support a list of workers
+9. Add disruptor/JStormUtils junit tests
+10. Enable the user to configure the monitor name for alimonitor
+11. Add the tcp option "reuseAddress" in the netty framework
+12. Fix the bug: when a spout does not implement the ICommitterTrident interface, MasterCoordinatorSpout will get stuck in the commit phase.
+
+
+#Release 0.9.6.1
+1. Added management tools for multiple clusters to the Web UI.
+2. Merged the Trident API from storm-0.9.3
+3. Replaced gson with fastjson
+4. Refactored metric json generation code.
+5. Stored version info in $JSTORM_HOME/RELEASE.
+6. Replaced SingleThreadDisruptorQueue with MultiThreadDisruptorQueue in the task deserialize thread.
+7. Fixed issues with the worker count on the Web UI.
+8. Fixed issues with accessing the task map from multiple threads.
+9. Fixed a NullPointerException while killing a worker and reading the worker's heartbeat object.
+10. The netty client connects to the server only in the NettyClient module.
+11. Added a break-loop operation when the netty client connection is closed
+12. Fixed the bug that the topology warning flag shown on the cluster page is not consistent with the error information shown on the topology page
+13. Added a recovery function for when the task error information data is corrupted
+14. Fixed the bug that metric data cannot be uploaded to Alimonitor when upgrading from pre-0.9.6 to 0.9.6 and executing pkill java without restarting the topology
+15. Fixed the bug that zeroMq failed to receive data
+16. Added an interface to easily set a worker's memory
+17. Set the default value of topology.alimonitor.metrics.post to false
+18. Only start NETTY_SERVER_DECODE_TIME for the netty server
+19. Keep compatibility with Storm for local mode
+20. Print the rootId when a tuple fails
+21. In order to keep compatibility with Storm, add the submitTopologyWithProgressBar interface (see the sketch after this list)
+22. Upgraded the netty version from 3.2.7 to 3.9.0
+23. Support assigning a topology to user-defined supervisors
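
Item 21 refers to the Storm-compatible submit call; a minimal sketch of its use (topology wiring elided, names illustrative):

    import backtype.storm.Config;
    import backtype.storm.StormSubmitter;
    import backtype.storm.topology.TopologyBuilder;

    public class SubmitWithProgressBarSketch {
        public static void main(String[] args) throws Exception {
            TopologyBuilder builder = new TopologyBuilder();
            // ... spouts and bolts would be registered on the builder here ...

            Config conf = new Config();
            conf.setNumWorkers(2);

            // Same semantics as submitTopology, but also reports jar upload progress.
            StormSubmitter.submitTopologyWithProgressBar("demo-topology", conf, builder.createTopology());
        }
    }
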
+
+
+#Release 0.9.6
+1. Update UI
+ - Display the metrics information of tasks and workers
+ - Add a warning flag when errors occur for a topology
+ - Add a link from the supervisor page to the task page
+2. Send metrics data to Alimonitor
+3. Add a metrics interface for users
+4. Add the task.cleanup.timeout.sec setting to let tasks clean up gracefully
+5. Set the worker's log name as topologyName-worker-port.log
+6. Add the setting "worker.redirect.output.file", so a worker can redirect System.out/System.err to a configured file
+7. Add the storm list command
+8. Add a closing-channel check in the netty client to avoid a double close
+9. Add a connecting check in the netty client to avoid connecting to one server twice at the same time
+
+#Release 0.9.5.1
+1. Add netty sync mode
+2. Add block operation in netty async mode
+3. Replace exception with Throwable in executor layer
+4. Upgrade curator-framework version from 1.15 to 1.3.2
+5. Add more netty junit tests
+6. Add a log when the queue is full
+
+#Release 0.9.5
+##Big feature:
+1. Redesign the scheduler algorithm, based on workers rather than tasks.
+
+## Bug fix
+1. Fix the disruptor using too much cpu
+2. Add the target NettyServer to the log when failing to send data via netty
+
+#Release 0.9.4.1
+##Bug fix:
+1. Improve the speed between tasks that are running in one worker
+2. Fix wrong timeout seconds
+3. Add a port check when a worker initializes and begins to kill the old worker
+4. Move the worker heartbeat thread before task initialization
+5. Move netty-server initialization before task initialization
+6. Check whether a tuple's rootId is duplicated
+7. Add a default value to Utils.getInt
+8. Add a result function in ReconnectRunnable
+9. Add an operation to start Timetick
+10. Halt the process when the master nimbus loses its ZK node
+11. Catch exceptions when cgroups kill a process
+12. Speed up reconnecting to the netty-server
+13. Share one task heartbeat thread among all tasks
+14. Quickly halt the process when initialization fails.
+15. Check the web-ui logview page size
+
+
+
+#Release 0.9.4
+
+## Big features
+1. Add a transaction programming mode
+2. Rewrite the netty code: 1. use a shared boss/worker thread pool; 2. asynchronously send batched tuples; 3. use a single thread to do the reconnect job; 4. receive batched tuples
+3. Add metrics and statistics
+4. Merge the Alimama storm branch into this version; submit jars with -conf, -D, -lib
+
+
+## Enhancement
+1. Add a setting so that when the supervisor has been shut down, the worker will shut down automatically
+2. Add the LocalFirstGrouping api
+3. Enable cgroup for normal users
+
+
+
+##Bug fix:
+1. Set the buffer size when uploading a jar
+2. Add a lock between the ZK watch and the timer thread when refreshing a connection
+3. Enable the nimbus monitor thread only when the topology is running in cluster mode
+4. Fix an exception when failing to read an old assignment from ZK
+5. Classloader fix for when both the parent and the current classloader load the same class
+6. Fix a log view null pointer exception
+
+#Release 0.9.3.1
+
+## Enhancement
+1. Switch from apache thrift7 to storm thrift7
+2. Set the default acker number to 1
+3. Add the "spout.single.thread" setting
+4. Make the nimbus logview port different from the supervisor's
+5. The web ui can list all files of the log's subdirectories
+6. Set the gc dump dir to the log dir
+
+
+#Release 0.9.3
+## New feature
+1. Support Aliyun Apsara/Hadoop Yarn
+
+## Enhancement
+1. Redesign Logview
+2. Kill the old worker under the same port when a worker is starting
+3. Add zk information/version information to the UI
+4. Add nodeport information for dead tasks in nimbus
+5. Add an interface to get values when a spout is doing an ack
+6. Add timeout statistics in bolts
+7. The jstorm script returns a status
+8. Add logs when failing to deserialize a tuple
+9. Skip the sleep operation when max_pending is 1 and waiting for an ack
+10. Remove useless dependencies
+11. Lengthen the task timeout setting
+12. Add the supervisor.use.ip setting
+13. Redirect supervisor out/err to /dev/null; redirect worker out/err to a file
+
+
+## Bug Fix
+1. Fix kryo failing to deserialize objects when the classloader is enabled
+2. Fix failing to reassign dead tasks when the worker number is less than the topology applied for
+3. Set a smaller jvm heap memory for jstorm-client
+4. Fix failing to set the topology status as active when doing the rebalance operation twice at the same time
+5. Fix a local mode bug under linux
+6. Fix inaccurate average latency
+7. GC tuning.
+8. Add a default kill function for AsyncLoopRunnable
+
+
+
+#Release 0.9.2
+## New feature
+1. Support LocalCluster/LocalDrpc mode, support debugging topology under local mode
+2. Support CGroups, assigning CPU in hardware level.
+3. Support simple logview
+
+## Bug fix or enhancement
+1. Change SpoutExecutor's RotatingMap to TimeCacheMap; putting too many timed-out tuples into it could easily cause a deadlock in the spout acker thread (see the sketch after this list)
+2. Tune gc parameters, improving performance and avoiding full GC
+3. Raise the topology's own gc priority, making it higher than the JStorm system setting.
+4. Tune Nimbus HA, switching nimbus faster when a nimbus failure occurs.
+5. Fix bugs found by the FindBugs tool.
+6. Revert the Trident interface to 0.8.1, because the 0.8.1 trident interface's performance is better.
+7. Set nimbus.task.timeout.secs to 60 to avoid nimbus doing assignments while a task is under full gc.
+8. Set the default rpc framework to netty
+9. Tune the nimbus shutdown flow
+10. Tune the worker shutdown flow
+11. Add a task heartbeat log
+12. Optimize the Drpc/LocalDrpc source code.
+13. Move the classloader to the client jar.
+14. Fix the classloader failing to load anonymous classes
+15. The Web UI displays the slave nimbus
+16. Add a thrift max read buffer size
+17. Make the CPU slot base a double
+18. Move the Zk utility to jstorm-client-extension.jar
+19. Fix a localOrShuffle null pointer
+20. Make redirecting a worker's System.out/System.err to a file configurable.
+21. Add a new RPC framework, JeroMq
+22. Fix a Zk watcher miss problem
+23. Update slf4j from 1.5.6 to 1.7.5
+24. Shut down the worker when an exception occurs in a Smart thread
+25. Skip downloading useless topologies in the Supervisor
+26. Re-download the topology when the Supervisor fails to deserialize it.
+27. Fix the topology codeDir being used as the resourceDir
+28. Catch errors when normalizing a topology
+29. Add a log when one task is found dead
+30. Add a maven repository; JStorm is able to build outside of Alibaba
+31. Fix a localOrShuffle null pointer exception
+32. Add statistics counting for internal tuples in one worker
+33. Add thrift.close after downloading the topology binary in the Supervisor
+
+
+# Release 0.9.1
+
+## New features
+1. Application classloader: when the application jar conflicts with the jstorm jar,
+   enable the application classloader (a hedged config sketch follows this list).
+2. Group quota: different groups can have different resource quotas.
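+
+A hedged sketch only: the exact configuration key for enabling the application
+classloader is an assumption here ("topology.enable.classloader" is used for
+illustration); check ConfigExtension in the jstorm release you run:
+
+```java
+import backtype.storm.Config;
+
+public class ClassloaderConfigSketch {
+    public static Config withAppClassloader() {
+        Config conf = new Config();
+        // Assumed key name: isolates application classes from the jstorm jar.
+        conf.put("topology.enable.classloader", true);
+        return conf;
+    }
+}
+```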
+
+## Bug fix or enhancement
+1. Fix RotatingMap contention issue
+2. Set the default acker number to 0
+3. Set the default spout/bolt number to 1
+4. Add the log directory to the log4j configuration file
+5. Add a transaction example
+6. Fix UI showing the wrong worker number on the topology page
+7. Fix UI showing the wrong latency on the topology page
+8. Replace hard-coded Integer conversion with JStormUtils.parseInt
+9. Support string parsing in Utils.getInt
+10. Remove useless dependencies in pom.xml
+11. Support the supervisor using an IP or a special hostname
+12. Add more details when no resource has been assigned to a new topology
+13. Replace normal threads with Smart threads
+14. Add GC details
+15. Code formatting
+16. Unify stormId and topologyId as topologyId
+17. Every nimbus registers its IP in ZK
+
+
+
+# Release 0.9.0
+This version follows the storm 0.9.0 interface, so applications running
+on storm 0.9.0 can run on jstorm 0.9.0 without any change.
+
+## Stability
+1. Provide nimbus HA. When the master nimbus shuts down, another online nimbus
+   is selected as the master. There is only one master nimbus online at any
+   time, and the slave nimbuses just synchronize the master's data.
+2. RPC through netty is stable; the sending speed matches the receiving speed.
+
+
+## Powerful scheduler
+1. Assign resources on four dimensions: CPU, memory, disk, and network
+2. An application can reuse its old assignment.
+3. An application can use user-defined resources.
+4. A task can request an extra CPU slot or memory slot.
+5. An application can force tasks to run on different supervisors or on the same supervisor
+
+
+
+
+
+
+
+
+# Release 0.7.1
+This version follows the storm 0.7.1 interface, so topologies running
+on storm 0.7.1 can run on jstorm without any change.
+
+## Stability
+* Assign workers in a balanced way
+* Add the "zmq.max.queue.msg" setting for zeromq
+* Communication between the worker and its tasks without zeromq
+* Add exception catching
+ * in supervisor SyncProcess/SyncSupervisor
+ * catch exceptions and call report_error in the spout's open and the bolt's prepare
+ * in all IO operations
+ * in all serialize/deserialize operations
+ * in all ZK operations
+ * in the topology upload/download functions
+ * during zeromq initialization
+* Do assignment/reassignment operations in one thread to avoid contention
+* Redesign nimbus's topology assignment algorithm, making the logic much simpler
+* Redesign the supervisor's sync assignment algorithm, making the logic much simpler
+* Reduce zookeeper load
+ * redesign the nimbus monitor logic so it just scans tasks' heartbeats, every 10s
+ * nimbus cancels its watch on supervisors
+ * supervisor heartbeat frequency changed to 10s
+ * supervisor syncSupervisor/syncProcess frequency changed to 10s
+ * supervisor scans /$(ZKROOT)/assignment only once per monitor loop
+ * task heartbeat frequency changed to 10s
+* Create the task pid file before connecting to ZK; this is very important when ZK is unstable.
+
+
+## Performance tuning
+* Remove one memory copy when deserializing tuples, improving performance hugely
+* Split the executor thread into two threads, one handling received tuples and one sending tuples, improving performance greatly
+* Redesign the sampling code so it samples every 5 seconds instead of once every 20 tuples, improving performance greatly
+* Simplify the ack logic, making the acker more efficient
+* Communication between the worker and its tasks no longer uses zeromq, just in-process memory sharing
+* In the worker's Drainer/virtualportdispatch threads and the spout/bolt recv/send threads,
+  the thread sleeps 1 ms when a loop iteration finds no tuple
+* Communication between the worker and its tasks without zeromq
+* Sampling frequency changed to every 5s instead of once every 20 tuples.
+
+## Enhancement
+* Add the IFailValueSpout interface
+* Redesign the sampling code; the statistics collection model is more general
+ * Add sending/receiving TPS statistics; the statistics are more precise
+* Automatically deactivate when killing/rebalancing a topology; the wait time is 2 * MSG_TIMEOUT
+* Fix the none-grouping bug: random.nextInt could generate a value less than 0
+* Sleep for a configured time (default 1 minute) after the spout's open finishes,
+  which is used to wait for other tasks to finish initialization
+* Check component names when submitting a topology, forbidding components
+  whose names start with "__"
+* Change the ZK node /$(ZKROOT)/storm to /$(ZKROOT)/topology
+* Abstract the topology check logic out of the real-topology generation function
+* When a supervisor is down and the topology is rebalanced, the alive tasks under the
+  down supervisor are treated as unavailable
+* Add a close-connection operation after finishing the topology binary download
+* Automatically create all local directories, such as
+  /$(LOCALDIR)/supervisor/localstate
+* When killing a worker, add a "kill and sleep" operation before the "kill -9" operation
+* When generating the real topology binary:
+ * configuration priority differs:
+   component configuration > topology configuration > system configuration (see the sketch after these notes)
+ * skip output streams whose target component doesn't exist
+ * skip components whose parallelism is 0
+ * throw an exception if a component's parallelism is less than 0
+* Skip ack/fail when the input stream setting is empty
+* Add the topology name to the log
+* Fix the UI select option error; the default is 10 minutes
+* The supervisor can display every worker's status
\ No newline at end of file
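
For the configuration-priority note above (component > topology > system), a minimal
sketch of how the levels interact; TestWordSpout and the tick-frequency key are used
purely for illustration:

```java
import backtype.storm.Config;
import backtype.storm.testing.TestWordSpout;
import backtype.storm.topology.TopologyBuilder;

public class ConfigPrioritySketch {
    public static void main(String[] args) {
        // Topology-level setting: applies to every component unless overridden.
        Config topoConf = new Config();
        topoConf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 30);

        TopologyBuilder builder = new TopologyBuilder();
        // Component-level setting: overrides the topology-level value for this spout only.
        builder.setSpout("words", new TestWordSpout(), 1)
               .addConfiguration(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 5);
        // Anything not set at either level falls back to the system configuration
        // (the defaults in storm.yaml).
    }
}
```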
[43/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java b/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java
deleted file mode 100644
index 67bf325..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java
+++ /dev/null
@@ -1,378 +0,0 @@
-package backtype.storm.drpc;
-
-import backtype.storm.Constants;
-import backtype.storm.ILocalDRPC;
-import backtype.storm.coordination.BatchBoltExecutor;
-import backtype.storm.coordination.CoordinatedBolt;
-import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
-import backtype.storm.coordination.CoordinatedBolt.IdStreamSpec;
-import backtype.storm.coordination.CoordinatedBolt.SourceArgs;
-import backtype.storm.coordination.IBatchBolt;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.topology.BaseConfigurationDeclarer;
-import backtype.storm.topology.BasicBoltExecutor;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.InputDeclarer;
-import backtype.storm.topology.OutputFieldsGetter;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-// Trident subsumes the functionality provided by this class, so it's deprecated
-@Deprecated
-public class LinearDRPCTopologyBuilder {
- String _function;
- List<Component> _components = new ArrayList<Component>();
-
- public LinearDRPCTopologyBuilder(String function) {
- _function = function;
- }
-
- public LinearDRPCInputDeclarer addBolt(IBatchBolt bolt, Number parallelism) {
- return addBolt(new BatchBoltExecutor(bolt), parallelism);
- }
-
- public LinearDRPCInputDeclarer addBolt(IBatchBolt bolt) {
- return addBolt(bolt, 1);
- }
-
- @Deprecated
- public LinearDRPCInputDeclarer addBolt(IRichBolt bolt, Number parallelism) {
- if (parallelism == null)
- parallelism = 1;
- Component component = new Component(bolt, parallelism.intValue());
- _components.add(component);
- return new InputDeclarerImpl(component);
- }
-
- @Deprecated
- public LinearDRPCInputDeclarer addBolt(IRichBolt bolt) {
- return addBolt(bolt, null);
- }
-
- public LinearDRPCInputDeclarer addBolt(IBasicBolt bolt, Number parallelism) {
- return addBolt(new BasicBoltExecutor(bolt), parallelism);
- }
-
- public LinearDRPCInputDeclarer addBolt(IBasicBolt bolt) {
- return addBolt(bolt, null);
- }
-
- public StormTopology createLocalTopology(ILocalDRPC drpc) {
- return createTopology(new DRPCSpout(_function, drpc));
- }
-
- public StormTopology createRemoteTopology() {
- return createTopology(new DRPCSpout(_function));
- }
-
- private StormTopology createTopology(DRPCSpout spout) {
- final String SPOUT_ID = "spout";
- final String PREPARE_ID = "prepare-request";
-
- TopologyBuilder builder = new TopologyBuilder();
- builder.setSpout(SPOUT_ID, spout);
- builder.setBolt(PREPARE_ID, new PrepareRequest())
- .noneGrouping(SPOUT_ID);
- int i = 0;
- for (; i < _components.size(); i++) {
- Component component = _components.get(i);
-
- Map<String, SourceArgs> source = new HashMap<String, SourceArgs>();
- if (i == 1) {
- source.put(boltId(i - 1), SourceArgs.single());
- } else if (i >= 2) {
- source.put(boltId(i - 1), SourceArgs.all());
- }
- IdStreamSpec idSpec = null;
- if (i == _components.size() - 1
- && component.bolt instanceof FinishedCallback) {
- idSpec = IdStreamSpec.makeDetectSpec(PREPARE_ID,
- PrepareRequest.ID_STREAM);
- }
- BoltDeclarer declarer = builder.setBolt(boltId(i),
- new CoordinatedBolt(component.bolt, source, idSpec),
- component.parallelism);
-
- for (Map conf : component.componentConfs) {
- declarer.addConfigurations(conf);
- }
-
- if (idSpec != null) {
- declarer.fieldsGrouping(idSpec.getGlobalStreamId()
- .get_componentId(), PrepareRequest.ID_STREAM,
- new Fields("request"));
- }
- if (i == 0 && component.declarations.isEmpty()) {
- declarer.noneGrouping(PREPARE_ID, PrepareRequest.ARGS_STREAM);
- } else {
- String prevId;
- if (i == 0) {
- prevId = PREPARE_ID;
- } else {
- prevId = boltId(i - 1);
- }
- for (InputDeclaration declaration : component.declarations) {
- declaration.declare(prevId, declarer);
- }
- }
- if (i > 0) {
- declarer.directGrouping(boltId(i - 1),
- Constants.COORDINATED_STREAM_ID);
- }
- }
-
- IRichBolt lastBolt = _components.get(_components.size() - 1).bolt;
- OutputFieldsGetter getter = new OutputFieldsGetter();
- lastBolt.declareOutputFields(getter);
- Map<String, StreamInfo> streams = getter.getFieldsDeclaration();
- if (streams.size() != 1) {
- throw new RuntimeException(
- "Must declare exactly one stream from last bolt in LinearDRPCTopology");
- }
- String outputStream = streams.keySet().iterator().next();
- List<String> fields = streams.get(outputStream).get_output_fields();
- if (fields.size() != 2) {
- throw new RuntimeException(
- "Output stream of last component in LinearDRPCTopology must contain exactly two fields. The first should be the request id, and the second should be the result.");
- }
-
- builder.setBolt(boltId(i), new JoinResult(PREPARE_ID))
- .fieldsGrouping(boltId(i - 1), outputStream,
- new Fields(fields.get(0)))
- .fieldsGrouping(PREPARE_ID, PrepareRequest.RETURN_STREAM,
- new Fields("request"));
- i++;
- builder.setBolt(boltId(i), new ReturnResults()).noneGrouping(
- boltId(i - 1));
- return builder.createTopology();
- }
-
- private static String boltId(int index) {
- return "bolt" + index;
- }
-
- private static class Component {
- public IRichBolt bolt;
- public int parallelism;
- public List<Map> componentConfs;
- public List<InputDeclaration> declarations = new ArrayList<InputDeclaration>();
-
- public Component(IRichBolt bolt, int parallelism) {
- this.bolt = bolt;
- this.parallelism = parallelism;
- this.componentConfs = new ArrayList();
- }
- }
-
- private static interface InputDeclaration {
- public void declare(String prevComponent, InputDeclarer declarer);
- }
-
- private class InputDeclarerImpl extends
- BaseConfigurationDeclarer<LinearDRPCInputDeclarer> implements
- LinearDRPCInputDeclarer {
- Component _component;
-
- public InputDeclarerImpl(Component component) {
- _component = component;
- }
-
- @Override
- public LinearDRPCInputDeclarer fieldsGrouping(final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.fieldsGrouping(prevComponent, fields);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer fieldsGrouping(final String streamId,
- final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.fieldsGrouping(prevComponent, streamId, fields);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer globalGrouping() {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.globalGrouping(prevComponent);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer globalGrouping(final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.globalGrouping(prevComponent, streamId);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer shuffleGrouping() {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.shuffleGrouping(prevComponent);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer shuffleGrouping(final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.shuffleGrouping(prevComponent, streamId);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer localOrShuffleGrouping() {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(prevComponent);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer localOrShuffleGrouping(
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(prevComponent, streamId);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer noneGrouping() {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.noneGrouping(prevComponent);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer noneGrouping(final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.noneGrouping(prevComponent, streamId);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer allGrouping() {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.allGrouping(prevComponent);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer allGrouping(final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.allGrouping(prevComponent, streamId);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer directGrouping() {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.directGrouping(prevComponent);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer directGrouping(final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.directGrouping(prevComponent, streamId);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer customGrouping(
- final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.customGrouping(prevComponent, grouping);
- }
- });
- return this;
- }
-
- @Override
- public LinearDRPCInputDeclarer customGrouping(final String streamId,
- final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(String prevComponent, InputDeclarer declarer) {
- declarer.customGrouping(prevComponent, streamId, grouping);
- }
- });
- return this;
- }
-
- private void addDeclaration(InputDeclaration declaration) {
- _component.declarations.add(declaration);
- }
-
- @Override
- public LinearDRPCInputDeclarer addConfigurations(Map conf) {
- _component.componentConfs.add(conf);
- return this;
- }
- }
-}
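
For context on the class removed above, a minimal usage sketch in local mode, modelled
on the classic Storm DRPC example; ExclaimBolt and the function name "exclaim" are
illustrative, not part of this change:

```java
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.drpc.LinearDRPCTopologyBuilder;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class DrpcUsageSketch {
    // Echoes the request id and an exclaimed copy of the arguments.
    public static class ExclaimBolt extends BaseBasicBolt {
        @Override
        public void execute(Tuple input, BasicOutputCollector collector) {
            Object requestId = input.getValue(0);
            String args = input.getString(1);
            collector.emit(new Values(requestId, args + "!"));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // The last bolt must emit exactly two fields: the request id, then the result.
            declarer.declare(new Fields("id", "result"));
        }
    }

    public static void main(String[] args) throws Exception {
        LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("exclaim");
        builder.addBolt(new ExclaimBolt(), 1);

        LocalDRPC drpc = new LocalDRPC();
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("drpc-demo", new Config(), builder.createLocalTopology(drpc));

        System.out.println(drpc.execute("exclaim", "hello")); // expected: "hello!"

        cluster.shutdown();
        drpc.shutdown();
    }
}
```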
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/PrepareRequest.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/PrepareRequest.java b/jstorm-client/src/main/java/backtype/storm/drpc/PrepareRequest.java
deleted file mode 100644
index 95b92fb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/PrepareRequest.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package backtype.storm.drpc;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.Map;
-import java.util.Random;
-import backtype.storm.utils.Utils;
-
-public class PrepareRequest extends BaseBasicBolt {
- public static final String ARGS_STREAM = Utils.DEFAULT_STREAM_ID;
- public static final String RETURN_STREAM = "ret";
- public static final String ID_STREAM = "id";
-
- Random rand;
-
- @Override
- public void prepare(Map map, TopologyContext context) {
- rand = new Random();
- }
-
- @Override
- public void execute(Tuple tuple, BasicOutputCollector collector) {
- String args = tuple.getString(0);
- String returnInfo = tuple.getString(1);
- long requestId = rand.nextLong();
- collector.emit(ARGS_STREAM, new Values(requestId, args));
- collector.emit(RETURN_STREAM, new Values(requestId, returnInfo));
- collector.emit(ID_STREAM, new Values(requestId));
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(ARGS_STREAM, new Fields("request", "args"));
- declarer.declareStream(RETURN_STREAM, new Fields("request", "return"));
- declarer.declareStream(ID_STREAM, new Fields("request"));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/ReturnResults.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/ReturnResults.java b/jstorm-client/src/main/java/backtype/storm/drpc/ReturnResults.java
deleted file mode 100644
index 1b9a15b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/ReturnResults.java
+++ /dev/null
@@ -1,83 +0,0 @@
-package backtype.storm.drpc;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.thrift7.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.Config;
-import backtype.storm.generated.DistributedRPCInvocations;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.ServiceRegistry;
-import backtype.storm.utils.Utils;
-
-public class ReturnResults extends BaseRichBolt {
- public static final Logger LOG = LoggerFactory
- .getLogger(ReturnResults.class);
- OutputCollector _collector;
- boolean local;
-
- Map<List, DRPCInvocationsClient> _clients = new HashMap<List, DRPCInvocationsClient>();
-
- @Override
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- _collector = collector;
- local = stormConf.get(Config.STORM_CLUSTER_MODE).equals("local");
- }
-
- @Override
- public void execute(Tuple input) {
- String result = (String) input.getValue(0);
- String returnInfo = (String) input.getValue(1);
- if (returnInfo != null) {
- Map retMap = (Map) Utils.from_json(returnInfo);
- final String host = (String) retMap.get("host");
- final int port = Utils.getInt(retMap.get("port"));
- String id = (String) retMap.get("id");
- DistributedRPCInvocations.Iface client;
- if (local) {
- client = (DistributedRPCInvocations.Iface) ServiceRegistry
- .getService(host);
- } else {
- List server = new ArrayList() {
- {
- add(host);
- add(port);
- }
- };
-
- if (!_clients.containsKey(server)) {
- _clients.put(server, new DRPCInvocationsClient(host, port));
- }
- client = _clients.get(server);
- }
-
- try {
- client.result(id, result);
- _collector.ack(input);
- } catch (TException e) {
- LOG.error("Failed to return results to DRPC server", e);
- _collector.fail(input);
- }
- }
- }
-
- @Override
- public void cleanup() {
- for (DRPCInvocationsClient c : _clients.values()) {
- c.close();
- }
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/AlreadyAliveException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/AlreadyAliveException.java b/jstorm-client/src/main/java/backtype/storm/generated/AlreadyAliveException.java
deleted file mode 100644
index 9e1d607..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/AlreadyAliveException.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class AlreadyAliveException extends Exception implements org.apache.thrift7.TBase<AlreadyAliveException, AlreadyAliveException._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("AlreadyAliveException");
-
- private static final org.apache.thrift7.protocol.TField MSG_FIELD_DESC = new org.apache.thrift7.protocol.TField("msg", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String msg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- MSG((short)1, "msg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // MSG
- return MSG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.MSG, new org.apache.thrift7.meta_data.FieldMetaData("msg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(AlreadyAliveException.class, metaDataMap);
- }
-
- public AlreadyAliveException() {
- }
-
- public AlreadyAliveException(
- String msg)
- {
- this();
- this.msg = msg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public AlreadyAliveException(AlreadyAliveException other) {
- if (other.is_set_msg()) {
- this.msg = other.msg;
- }
- }
-
- public AlreadyAliveException deepCopy() {
- return new AlreadyAliveException(this);
- }
-
- @Override
- public void clear() {
- this.msg = null;
- }
-
- public String get_msg() {
- return this.msg;
- }
-
- public void set_msg(String msg) {
- this.msg = msg;
- }
-
- public void unset_msg() {
- this.msg = null;
- }
-
- /** Returns true if field msg is set (has been assigned a value) and false otherwise */
- public boolean is_set_msg() {
- return this.msg != null;
- }
-
- public void set_msg_isSet(boolean value) {
- if (!value) {
- this.msg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case MSG:
- if (value == null) {
- unset_msg();
- } else {
- set_msg((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case MSG:
- return get_msg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case MSG:
- return is_set_msg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof AlreadyAliveException)
- return this.equals((AlreadyAliveException)that);
- return false;
- }
-
- public boolean equals(AlreadyAliveException that) {
- if (that == null)
- return false;
-
- boolean this_present_msg = true && this.is_set_msg();
- boolean that_present_msg = true && that.is_set_msg();
- if (this_present_msg || that_present_msg) {
- if (!(this_present_msg && that_present_msg))
- return false;
- if (!this.msg.equals(that.msg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_msg = true && (is_set_msg());
- builder.append(present_msg);
- if (present_msg)
- builder.append(msg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(AlreadyAliveException other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- AlreadyAliveException typedOther = (AlreadyAliveException)other;
-
- lastComparison = Boolean.valueOf(is_set_msg()).compareTo(typedOther.is_set_msg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_msg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.msg, typedOther.msg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // MSG
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.msg = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.msg != null) {
- oprot.writeFieldBegin(MSG_FIELD_DESC);
- oprot.writeString(this.msg);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("AlreadyAliveException(");
- boolean first = true;
-
- sb.append("msg:");
- if (this.msg == null) {
- sb.append("null");
- } else {
- sb.append(this.msg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_msg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/AuthorizationException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/AuthorizationException.java b/jstorm-client/src/main/java/backtype/storm/generated/AuthorizationException.java
deleted file mode 100644
index 9cd7e52..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/AuthorizationException.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class AuthorizationException extends Exception implements org.apache.thrift7.TBase<AuthorizationException, AuthorizationException._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("AuthorizationException");
-
- private static final org.apache.thrift7.protocol.TField MSG_FIELD_DESC = new org.apache.thrift7.protocol.TField("msg", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String msg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- MSG((short)1, "msg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // MSG
- return MSG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.MSG, new org.apache.thrift7.meta_data.FieldMetaData("msg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(AuthorizationException.class, metaDataMap);
- }
-
- public AuthorizationException() {
- }
-
- public AuthorizationException(
- String msg)
- {
- this();
- this.msg = msg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public AuthorizationException(AuthorizationException other) {
- if (other.is_set_msg()) {
- this.msg = other.msg;
- }
- }
-
- public AuthorizationException deepCopy() {
- return new AuthorizationException(this);
- }
-
- @Override
- public void clear() {
- this.msg = null;
- }
-
- public String get_msg() {
- return this.msg;
- }
-
- public void set_msg(String msg) {
- this.msg = msg;
- }
-
- public void unset_msg() {
- this.msg = null;
- }
-
- /** Returns true if field msg is set (has been assigned a value) and false otherwise */
- public boolean is_set_msg() {
- return this.msg != null;
- }
-
- public void set_msg_isSet(boolean value) {
- if (!value) {
- this.msg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case MSG:
- if (value == null) {
- unset_msg();
- } else {
- set_msg((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case MSG:
- return get_msg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case MSG:
- return is_set_msg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof AuthorizationException)
- return this.equals((AuthorizationException)that);
- return false;
- }
-
- public boolean equals(AuthorizationException that) {
- if (that == null)
- return false;
-
- boolean this_present_msg = true && this.is_set_msg();
- boolean that_present_msg = true && that.is_set_msg();
- if (this_present_msg || that_present_msg) {
- if (!(this_present_msg && that_present_msg))
- return false;
- if (!this.msg.equals(that.msg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_msg = true && (is_set_msg());
- builder.append(present_msg);
- if (present_msg)
- builder.append(msg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(AuthorizationException other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- AuthorizationException typedOther = (AuthorizationException)other;
-
- lastComparison = Boolean.valueOf(is_set_msg()).compareTo(typedOther.is_set_msg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_msg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.msg, typedOther.msg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // MSG
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.msg = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.msg != null) {
- oprot.writeFieldBegin(MSG_FIELD_DESC);
- oprot.writeString(this.msg);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("AuthorizationException(");
- boolean first = true;
-
- sb.append("msg:");
- if (this.msg == null) {
- sb.append("null");
- } else {
- sb.append(this.msg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_msg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/Bolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/Bolt.java b/jstorm-client/src/main/java/backtype/storm/generated/Bolt.java
deleted file mode 100644
index 44bf514..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/Bolt.java
+++ /dev/null
@@ -1,427 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Bolt implements org.apache.thrift7.TBase<Bolt, Bolt._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("Bolt");
-
- private static final org.apache.thrift7.protocol.TField BOLT_OBJECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("bolt_object", org.apache.thrift7.protocol.TType.STRUCT, (short)1);
- private static final org.apache.thrift7.protocol.TField COMMON_FIELD_DESC = new org.apache.thrift7.protocol.TField("common", org.apache.thrift7.protocol.TType.STRUCT, (short)2);
-
- private ComponentObject bolt_object; // required
- private ComponentCommon common; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- BOLT_OBJECT((short)1, "bolt_object"),
- COMMON((short)2, "common");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // BOLT_OBJECT
- return BOLT_OBJECT;
- case 2: // COMMON
- return COMMON;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.BOLT_OBJECT, new org.apache.thrift7.meta_data.FieldMetaData("bolt_object", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ComponentObject.class)));
- tmpMap.put(_Fields.COMMON, new org.apache.thrift7.meta_data.FieldMetaData("common", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ComponentCommon.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(Bolt.class, metaDataMap);
- }
-
- public Bolt() {
- }
-
- public Bolt(
- ComponentObject bolt_object,
- ComponentCommon common)
- {
- this();
- this.bolt_object = bolt_object;
- this.common = common;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public Bolt(Bolt other) {
- if (other.is_set_bolt_object()) {
- this.bolt_object = new ComponentObject(other.bolt_object);
- }
- if (other.is_set_common()) {
- this.common = new ComponentCommon(other.common);
- }
- }
-
- public Bolt deepCopy() {
- return new Bolt(this);
- }
-
- @Override
- public void clear() {
- this.bolt_object = null;
- this.common = null;
- }
-
- public ComponentObject get_bolt_object() {
- return this.bolt_object;
- }
-
- public void set_bolt_object(ComponentObject bolt_object) {
- this.bolt_object = bolt_object;
- }
-
- public void unset_bolt_object() {
- this.bolt_object = null;
- }
-
- /** Returns true if field bolt_object is set (has been assigned a value) and false otherwise */
- public boolean is_set_bolt_object() {
- return this.bolt_object != null;
- }
-
- public void set_bolt_object_isSet(boolean value) {
- if (!value) {
- this.bolt_object = null;
- }
- }
-
- public ComponentCommon get_common() {
- return this.common;
- }
-
- public void set_common(ComponentCommon common) {
- this.common = common;
- }
-
- public void unset_common() {
- this.common = null;
- }
-
- /** Returns true if field common is set (has been assigned a value) and false otherwise */
- public boolean is_set_common() {
- return this.common != null;
- }
-
- public void set_common_isSet(boolean value) {
- if (!value) {
- this.common = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case BOLT_OBJECT:
- if (value == null) {
- unset_bolt_object();
- } else {
- set_bolt_object((ComponentObject)value);
- }
- break;
-
- case COMMON:
- if (value == null) {
- unset_common();
- } else {
- set_common((ComponentCommon)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case BOLT_OBJECT:
- return get_bolt_object();
-
- case COMMON:
- return get_common();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case BOLT_OBJECT:
- return is_set_bolt_object();
- case COMMON:
- return is_set_common();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof Bolt)
- return this.equals((Bolt)that);
- return false;
- }
-
- public boolean equals(Bolt that) {
- if (that == null)
- return false;
-
- boolean this_present_bolt_object = true && this.is_set_bolt_object();
- boolean that_present_bolt_object = true && that.is_set_bolt_object();
- if (this_present_bolt_object || that_present_bolt_object) {
- if (!(this_present_bolt_object && that_present_bolt_object))
- return false;
- if (!this.bolt_object.equals(that.bolt_object))
- return false;
- }
-
- boolean this_present_common = true && this.is_set_common();
- boolean that_present_common = true && that.is_set_common();
- if (this_present_common || that_present_common) {
- if (!(this_present_common && that_present_common))
- return false;
- if (!this.common.equals(that.common))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_bolt_object = true && (is_set_bolt_object());
- builder.append(present_bolt_object);
- if (present_bolt_object)
- builder.append(bolt_object);
-
- boolean present_common = true && (is_set_common());
- builder.append(present_common);
- if (present_common)
- builder.append(common);
-
- return builder.toHashCode();
- }
-
- public int compareTo(Bolt other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- Bolt typedOther = (Bolt)other;
-
- lastComparison = Boolean.valueOf(is_set_bolt_object()).compareTo(typedOther.is_set_bolt_object());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_bolt_object()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.bolt_object, typedOther.bolt_object);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_common()).compareTo(typedOther.is_set_common());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_common()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.common, typedOther.common);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // BOLT_OBJECT
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.bolt_object = new ComponentObject();
- this.bolt_object.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // COMMON
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.common = new ComponentCommon();
- this.common.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.bolt_object != null) {
- oprot.writeFieldBegin(BOLT_OBJECT_FIELD_DESC);
- this.bolt_object.write(oprot);
- oprot.writeFieldEnd();
- }
- if (this.common != null) {
- oprot.writeFieldBegin(COMMON_FIELD_DESC);
- this.common.write(oprot);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("Bolt(");
- boolean first = true;
-
- sb.append("bolt_object:");
- if (this.bolt_object == null) {
- sb.append("null");
- } else {
- sb.append(this.bolt_object);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("common:");
- if (this.common == null) {
- sb.append("null");
- } else {
- sb.append(this.common);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_bolt_object()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'bolt_object' is unset! Struct:" + toString());
- }
-
- if (!is_set_common()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'common' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
[54/60] [abbrv] storm git commit: removed jstorm-on-yarn subdirectory
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Util.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Util.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Util.java
deleted file mode 100644
index 43ae410..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Util.java
+++ /dev/null
@@ -1,399 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.io.OutputStreamWriter;
-import java.net.URL;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.Iterator;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Scanner;
-import java.util.Set;
-import java.util.jar.JarEntry;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsAction;
-import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.yarn.api.ApplicationConstants;
-import org.apache.hadoop.yarn.api.records.LocalResource;
-import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.util.Records;
-import org.yaml.snakeyaml.Yaml;
-
-import com.google.common.base.Joiner;
-
-public class Util {
-
- private static final String JSTORM_CONF_PATH_STRING = "conf" + Path.SEPARATOR + "storm.yaml";
-
- static String getJStormHome() {
-// String ret = System.getenv().get("JSTORM_HOME");
- String ret = System.getProperty("jstorm.home");
- if (ret == null) {
- throw new RuntimeException("jstorm.home is not set");
- }
- return ret;
- }
-
- @SuppressWarnings("rawtypes")
- static Version getJStormVersion() throws IOException {
-
- String versionNumber = "Unknown";
-// String versionNumber = "0.9.3.1";
- System.out.println(getJStormHome());
- File releaseFile = new File(getJStormHome(), "RELEASE");
- if (releaseFile.exists()) {
- BufferedReader reader = new BufferedReader(new FileReader(releaseFile));
- try {
- versionNumber = reader.readLine().trim();
- } finally {
- reader.close();
- }
- }
-
- File buildFile = new File(getJStormHome(), "BUILD");
- String buildNumber = null;
- if (buildFile.exists()) {
- BufferedReader reader = new BufferedReader(new FileReader(buildFile));
- try {
- buildNumber = reader.readLine().trim();
- } finally {
- reader.close();
- }
- }
-
- Version version = new Version(versionNumber, buildNumber);
- return version;
- }
-
- static String getJStormHomeInZip(FileSystem fs, Path zip, String jstormVersion) throws IOException, RuntimeException {
- FSDataInputStream fsInputStream = fs.open(zip);
- ZipInputStream zipInputStream = new ZipInputStream(fsInputStream);
- ZipEntry entry = zipInputStream.getNextEntry();
- while (entry != null) {
- String entryName = entry.getName();
- if (entryName.matches("^jstorm(-" + jstormVersion + ")?/")) {
- fsInputStream.close();
- return entryName.replace("/", "");
- }
- entry = zipInputStream.getNextEntry();
- }
- fsInputStream.close();
- throw new RuntimeException("Can not find jstorm home entry in jstorm zip file.");
- }
-
- static LocalResource newYarnAppResource(FileSystem fs, Path path,
- LocalResourceType type, LocalResourceVisibility vis) throws IOException {
- Path qualified = fs.makeQualified(path);
- FileStatus status = fs.getFileStatus(qualified);
- LocalResource resource = Records.newRecord(LocalResource.class);
- resource.setType(type);
- resource.setVisibility(vis);
- resource.setResource(ConverterUtils.getYarnUrlFromPath(qualified));
- resource.setTimestamp(status.getModificationTime());
- resource.setSize(status.getLen());
- return resource;
- }
-
- @SuppressWarnings("rawtypes")
- static void rmNulls(Map map) {
- Set s = map.entrySet();
- Iterator it = s.iterator();
- while (it.hasNext()) {
- Map.Entry m =(Map.Entry)it.next();
- if (m.getValue() == null)
- it.remove();
- }
- }
-
- @SuppressWarnings("rawtypes")
- static Path createConfigurationFileInFs(FileSystem fs,
- String appHome, Map stormConf, YarnConfiguration yarnConf)
- throws IOException {
- // dump stringwriter's content into FS conf/storm.yaml
- Path confDst = new Path(fs.getHomeDirectory(),
- appHome + Path.SEPARATOR + JSTORM_CONF_PATH_STRING);
- Path dirDst = confDst.getParent();
- fs.mkdirs(dirDst);
-
- //storm.yaml
- FSDataOutputStream out = fs.create(confDst);
- Yaml yaml = new Yaml();
- OutputStreamWriter writer = new OutputStreamWriter(out);
- rmNulls(stormConf);
- yaml.dump(stormConf, writer);
- writer.close();
- out.close();
-
- //yarn-site.xml
- Path yarn_site_xml = new Path(dirDst, "yarn-site.xml");
- out = fs.create(yarn_site_xml);
- writer = new OutputStreamWriter(out);
- yarnConf.writeXml(writer);
- writer.close();
- out.close();
-
- //logback.xml
- Path logback_xml = new Path(dirDst, "logback.xml");
- out = fs.create(logback_xml);
- CreateLogbackXML(out);
- out.close();
-
- return dirDst;
- }
-
- static LocalResource newYarnAppResource(FileSystem fs, Path path)
- throws IOException {
- return Util.newYarnAppResource(fs, path, LocalResourceType.FILE,
- LocalResourceVisibility.APPLICATION);
- }
-
- private static void CreateLogbackXML(OutputStream out) throws IOException {
- Enumeration<URL> logback_xml_urls;
- logback_xml_urls = Thread.currentThread().getContextClassLoader().getResources("logback.xml");
- while (logback_xml_urls.hasMoreElements()) {
- URL logback_xml_url = logback_xml_urls.nextElement();
- if (logback_xml_url.getProtocol().equals("file")) {
- //Case 1: logback.xml as simple file
- FileInputStream is = new FileInputStream(logback_xml_url.getPath());
- // copy until EOF; available() is not a reliable end-of-stream check
- int b;
- while ((b = is.read()) != -1) {
- out.write(b);
- }
- is.close();
- return;
- }
- if (logback_xml_url.getProtocol().equals("jar")) {
- //Case 2: logback.xml included in a JAR
- String path = logback_xml_url.getPath();
- String jarFile = path.substring("file:".length(), path.indexOf("!"));
- java.util.jar.JarFile jar = new java.util.jar.JarFile(jarFile);
- Enumeration<JarEntry> enums = jar.entries();
- while (enums.hasMoreElements()) {
- java.util.jar.JarEntry file = enums.nextElement();
- if (!file.isDirectory() && file.getName().equals("logback.xml")) {
- InputStream is = jar.getInputStream(file); // get the input stream
- // copy until EOF; available() may report 0 before the jar entry is fully read
- int b;
- while ((b = is.read()) != -1) {
- out.write(b);
- }
- is.close();
- jar.close();
- return;
- }
- }
- jar.close();
- }
- }
-
- throw new IOException("Failed to locate a logback.xml");
- }
-
- @SuppressWarnings("rawtypes")
- private static List<String> buildCommandPrefix(Map conf, String childOptsKey)
- throws IOException {
- String jstormHomePath = getJStormHome();
- List<String> toRet = new ArrayList<String>();
- if (System.getenv("JAVA_HOME") != null)
- toRet.add(System.getenv("JAVA_HOME") + "/bin/java");
- else
- toRet.add("java");
- toRet.add("-server");
- toRet.add("-Djstorm.home=" + jstormHomePath);
- toRet.add("-Djava.library.path=" + conf.get(backtype.storm.Config.JAVA_LIBRARY_PATH));
- toRet.add("-Djstorm.conf.file=" + new File(JSTORM_CONF_PATH_STRING).getName());
-
- //for debug
-// toRet.add("-Xdebug -Xrunjdwp:transport=dt_socket,server=y,suspend=y,address=8000");
-
- toRet.add("-cp");
- toRet.add(buildClassPathArgument());
-
- /*
- if (conf.containsKey(childOptsKey)
- && conf.get(childOptsKey) != null) {
- toRet.add((String) conf.get(childOptsKey));
- }
- */
-
- return toRet;
- }
-
- @SuppressWarnings("rawtypes")
- static List<String> buildUICommands(Map conf) throws IOException {
- List<String> toRet =
- buildCommandPrefix(conf, backtype.storm.Config.UI_CHILDOPTS);
-
- toRet.add("-Dstorm.options=" + backtype.storm.Config.NIMBUS_HOST + "=localhost");
- toRet.add("-Dlogfile.name=" + System.getenv("JSTORM_LOG_DIR") + "/ui.log");
- toRet.add("backtype.storm.ui.core");
-
- return toRet;
- }
-
- @SuppressWarnings("rawtypes")
- static List<String> buildNimbusCommands(Map conf) throws IOException {
- List<String> toRet =
- buildCommandPrefix(conf, backtype.storm.Config.NIMBUS_CHILDOPTS);
-
- toRet.add("-Dlogfile.name=" + System.getenv("JSTORM_LOG_DIR") + "/nimbus.log");
- toRet.add("com.alibaba.jstorm.daemon.nimbus.NimbusServer");
-
- return toRet;
- }
-
- @SuppressWarnings("rawtypes")
- static List<String> buildSupervisorCommands(Map conf) throws IOException {
- List<String> toRet =
- buildCommandPrefix(conf, backtype.storm.Config.NIMBUS_CHILDOPTS);
-
- toRet.add("-Dworker.logdir="+ ApplicationConstants.LOG_DIR_EXPANSION_VAR);
- toRet.add("-Dlogfile.name=" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/supervisor.log");
- toRet.add("com.alibaba.jstorm.daemon.supervisor.Supervisor");
-
- return toRet;
- }
-
- private static String buildClassPathArgument() throws IOException {
- List<String> paths = new ArrayList<String>();
- paths.add(new File(JSTORM_CONF_PATH_STRING).getParent());
- paths.add(getJStormHome());
- for (String jarPath : findAllJarsInPaths(getJStormHome(), getJStormHome() + File.separator + "lib")) {
- paths.add(jarPath);
- }
- return Joiner.on(File.pathSeparatorChar).join(paths);
- }
-
- private static interface FileVisitor {
- public void visit(File file);
- }
-
- private static List<String> findAllJarsInPaths(String... pathStrs) {
- final LinkedHashSet<String> pathSet = new LinkedHashSet<String>();
-
- FileVisitor visitor = new FileVisitor() {
-
- @Override
- public void visit(File file) {
- String name = file.getName();
- if (name.endsWith(".jar")) {
- pathSet.add(file.getPath());
- }
- }
- };
-
- for (String path : pathStrs) {
- File file = new File(path);
- traverse(file, visitor);
- }
-
- final List<String> toRet = new ArrayList<String>();
- for (String p : pathSet) {
- toRet.add(p);
- }
- return toRet;
- }
-
- private static void traverse(File file, FileVisitor visitor) {
- if (file.isDirectory()) {
- // listFiles() returns null for unreadable directories or on I/O errors
- File[] children = file.listFiles();
- if (children != null) {
- for (File child : children) {
- traverse(child, visitor);
- }
- }
- } else {
- visitor.visit(file);
- }
- }
-
- static String getApplicationHomeForId(String id) {
- if (id.isEmpty()) {
- throw new IllegalArgumentException(
- "The ID of the application cannot be empty.");
- }
- return ".jstorm" + Path.SEPARATOR + id;
- }
-
- /**
- * Returns whether a cache file is visible to all users (public)
- * or not.
- * @param fs Hadoop file system
- * @param path file path
- * @return true if the path is visible to all, false otherwise
- * @throws IOException
- */
- static boolean isPublic(FileSystem fs, Path path) throws IOException {
- //the leaf level file should be readable by others
- if (!checkPermissionOfOther(fs, path, FsAction.READ)) {
- return false;
- }
- return ancestorsHaveExecutePermissions(fs, path.getParent());
- }
-
- /**
- * Checks for a given path whether the Other permissions on it
- * imply the permission in the passed FsAction
- * @param fs
- * @param path
- * @param action
- * @return true if the path in the uri is visible to all, false otherwise
- * @throws IOException
- */
- private static boolean checkPermissionOfOther(FileSystem fs, Path path,
- FsAction action) throws IOException {
- FileStatus status = fs.getFileStatus(path);
- FsPermission perms = status.getPermission();
- FsAction otherAction = perms.getOtherAction();
- if (otherAction.implies(action)) {
- return true;
- }
- return false;
- }
-
- /**
- * Returns true if all ancestors of the specified path have the 'execute'
- * permission set for all users (i.e. that other users can traverse
- * the directory hierarchy to the given path)
- */
- static boolean ancestorsHaveExecutePermissions(FileSystem fs, Path path) throws IOException {
- Path current = path;
- while (current != null) {
- //the subdirs in the path should have execute permissions for others
- if (!checkPermissionOfOther(fs, current, FsAction.EXECUTE)) {
- return false;
- }
- current = current.getParent();
- }
- return true;
- }
-
- static void redirectStreamAsync(final InputStream input, final PrintStream output) {
- new Thread(new Runnable() {
- @Override
- public void run() {
- Scanner scanner = new Scanner(input);
- while (scanner.hasNextLine()) {
- output.println(scanner.nextLine());
- }
- }
- }).start();
- }
-}
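As a rough illustration of how the helpers above fit together, the sketch below joins the pieces produced by buildNimbusCommands into a single launch string of the kind a YARN ContainerLaunchContext expects. It assumes it lives in the same com.alibaba.jstorm.yarn package (the Util methods are package-private) and that conf was already loaded from storm.yaml; the class name and the log redirection are illustrative, not taken from this code.

    package com.alibaba.jstorm.yarn;

    import java.io.IOException;
    import java.util.List;
    import java.util.Map;

    import org.apache.hadoop.yarn.api.ApplicationConstants;

    import com.google.common.base.Joiner;

    class NimbusLaunchSketch {
        static String nimbusLaunchCommand(Map conf) throws IOException {
            // java -server -Djstorm.home=... -cp ... com.alibaba.jstorm.daemon.nimbus.NimbusServer
            List<String> parts = Util.buildNimbusCommands(conf);
            // redirect stdout/stderr into the YARN container log directory
            return Joiner.on(" ").join(parts)
                    + " 1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/nimbus.stdout"
                    + " 2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/nimbus.stderr";
        }
    }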
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Version.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Version.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Version.java
deleted file mode 100644
index 1ae7b2b..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/Version.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-public class Version {
-
- private final String version;
- private final String build;
-
- public Version(String version, String build) {
- this.version = version;
- this.build = build;
- }
-
- public String version() {
- return this.version;
- }
-
- public String build() {
- return this.build;
- }
-
- @Override
- public String toString() {
- if (null == build || build.isEmpty()) {
- return version;
- } else {
- return version + "-" + build;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e1f68448/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/VersionCommand.java
----------------------------------------------------------------------
diff --git a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/VersionCommand.java b/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/VersionCommand.java
deleted file mode 100644
index 4c53425..0000000
--- a/jstorm-on-yarn/src/main/java/com/alibaba/jstorm/yarn/VersionCommand.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package com.alibaba.jstorm.yarn;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.Options;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.alibaba.jstorm.yarn.Client.ClientCommand;
-
-public class VersionCommand implements ClientCommand {
-
- private static final Logger LOG = LoggerFactory
- .getLogger(VersionCommand.class);
-
- VersionCommand() {
-
- }
- @Override
- public Options getOpts() {
- Options opts = new Options();
- return opts;
- }
-
- @Override
- public String getHeaderDescription() {
- return "jstorm-yarn version";
- }
-
- @Override
- public void process(CommandLine cl) throws Exception {
- Version version = Util.getJStormVersion();
- System.out.println(version.toString());
- }
-
-}
[26/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/ShellBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/ShellBolt.java b/jstorm-client/src/main/java/backtype/storm/task/ShellBolt.java
deleted file mode 100644
index ba79f7d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/ShellBolt.java
+++ /dev/null
@@ -1,383 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.task;
-
-import backtype.storm.Config;
-import backtype.storm.Constants;
-import backtype.storm.generated.ShellComponent;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.metric.api.rpc.IShellMetric;
-import backtype.storm.multilang.BoltMsg;
-import backtype.storm.multilang.ShellMsg;
-import backtype.storm.topology.ReportedFailedException;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.ShellProcess;
-import clojure.lang.RT;
-import com.google.common.util.concurrent.MoreExecutors;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.*;
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
-import static java.util.concurrent.TimeUnit.SECONDS;
-
-/**
- * A bolt that shells out to another process to process tuples. ShellBolt
- * communicates with that process over stdio using a special protocol. An ~100
- * line library is required to implement that protocol, and adapter libraries
- * currently exist for Ruby and Python.
- *
- * <p>To run a ShellBolt on a cluster, the scripts that are shelled out to must be
- * in the resources directory within the jar submitted to the master.
- * During development/testing on a local machine, that resources directory just
- * needs to be on the classpath.</p>
- *
- * <p>When creating topologies using the Java API, subclass this bolt and implement
- * the IRichBolt interface to create components for the topology that use other languages. For example:
- * </p>
- *
- * <pre>
- * public class MyBolt extends ShellBolt implements IRichBolt {
- * public MyBolt() {
- * super("python", "mybolt.py");
- * }
- *
- * public void declareOutputFields(OutputFieldsDeclarer declarer) {
- * declarer.declare(new Fields("field1", "field2"));
- * }
- * }
- * </pre>
- */
-public class ShellBolt implements IBolt {
- public static final String HEARTBEAT_STREAM_ID = "__heartbeat";
- public static Logger LOG = LoggerFactory.getLogger(ShellBolt.class);
- Process _subprocess;
- OutputCollector _collector;
- Map<String, Tuple> _inputs = new ConcurrentHashMap<String, Tuple>();
-
- private String[] _command;
- private ShellProcess _process;
- private volatile boolean _running = true;
- private volatile Throwable _exception;
- private LinkedBlockingQueue _pendingWrites = new LinkedBlockingQueue();
- private Random _rand;
-
- private Thread _readerThread;
- private Thread _writerThread;
-
- private TopologyContext _context;
-
- private int workerTimeoutMills;
- private ScheduledExecutorService heartBeatExecutorService;
- private AtomicLong lastHeartbeatTimestamp = new AtomicLong();
- private AtomicBoolean sendHeartbeatFlag = new AtomicBoolean(false);
-
- public ShellBolt(ShellComponent component) {
- this(component.get_execution_command(), component.get_script());
- }
-
- public ShellBolt(String... command) {
- _command = command;
- }
-
- public void prepare(Map stormConf, TopologyContext context,
- final OutputCollector collector) {
- Object maxPending = stormConf.get(Config.TOPOLOGY_SHELLBOLT_MAX_PENDING);
- if (maxPending != null) {
- this._pendingWrites = new LinkedBlockingQueue(((Number)maxPending).intValue());
- }
- _rand = new Random();
- _collector = collector;
-
- _context = context;
-
- workerTimeoutMills = 1000 * RT.intCast(stormConf.get(Config.SUPERVISOR_WORKER_TIMEOUT_SECS));
-
- _process = new ShellProcess(_command);
-
- //subprocesses must send their pid first thing
- Number subpid = _process.launch(stormConf, context);
- LOG.info("Launched subprocess with pid " + subpid);
-
- // reader
- _readerThread = new Thread(new BoltReaderRunnable());
- _readerThread.start();
-
- _writerThread = new Thread(new BoltWriterRunnable());
- _writerThread.start();
-
- heartBeatExecutorService = MoreExecutors.getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1));
- heartBeatExecutorService.scheduleAtFixedRate(new BoltHeartbeatTimerTask(this), 1, 1, TimeUnit.SECONDS);
-
- LOG.info("Start checking heartbeat...");
- setHeartbeat();
- }
-
- public void execute(Tuple input) {
- if (_exception != null) {
- throw new RuntimeException(_exception);
- }
-
- //just need an id
- String genId = Long.toString(_rand.nextLong());
- _inputs.put(genId, input);
- try {
- BoltMsg boltMsg = createBoltMessage(input, genId);
-
- _pendingWrites.put(boltMsg);
- } catch(InterruptedException e) {
- String processInfo = _process.getProcessInfoString() + _process.getProcessTerminationInfoString();
- throw new RuntimeException("Error during multilang processing " + processInfo, e);
- }
- }
-
- private BoltMsg createBoltMessage(Tuple input, String genId) {
- BoltMsg boltMsg = new BoltMsg();
- boltMsg.setId(genId);
- boltMsg.setComp(input.getSourceComponent());
- boltMsg.setStream(input.getSourceStreamId());
- boltMsg.setTask(input.getSourceTask());
- boltMsg.setTuple(input.getValues());
- return boltMsg;
- }
-
- public void cleanup() {
- _running = false;
- heartBeatExecutorService.shutdownNow();
- _writerThread.interrupt();
- _readerThread.interrupt();
- _process.destroy();
- _inputs.clear();
- }
-
- private void handleAck(Object id) {
- Tuple acked = _inputs.remove(id);
- if(acked==null) {
- throw new RuntimeException("Acked a non-existent or already acked/failed id: " + id);
- }
- _collector.ack(acked);
- }
-
- private void handleFail(Object id) {
- Tuple failed = _inputs.remove(id);
- if(failed==null) {
- throw new RuntimeException("Failed a non-existent or already acked/failed id: " + id);
- }
- _collector.fail(failed);
- }
-
- private void handleError(String msg) {
- _collector.reportError(new Exception("Shell Process Exception: " + msg));
- }
-
- private void handleEmit(ShellMsg shellMsg) throws InterruptedException {
- List<Tuple> anchors = new ArrayList<Tuple>();
- List<String> recvAnchors = shellMsg.getAnchors();
- if (recvAnchors != null) {
- for (String anchor : recvAnchors) {
- Tuple t = _inputs.get(anchor);
- if (t == null) {
- throw new RuntimeException("Anchored onto " + anchor + " after ack/fail");
- }
- anchors.add(t);
- }
- }
-
- if(shellMsg.getTask() == 0) {
- List<Integer> outtasks = _collector.emit(shellMsg.getStream(), anchors, shellMsg.getTuple());
- if (shellMsg.areTaskIdsNeeded()) {
- _pendingWrites.put(outtasks);
- }
- } else {
- _collector.emitDirect((int) shellMsg.getTask(),
- shellMsg.getStream(), anchors, shellMsg.getTuple());
- }
- }
-
- private void handleLog(ShellMsg shellMsg) {
- String msg = shellMsg.getMsg();
- msg = "ShellLog " + _process.getProcessInfoString() + " " + msg;
- ShellMsg.ShellLogLevel logLevel = shellMsg.getLogLevel();
-
- switch (logLevel) {
- case TRACE:
- LOG.trace(msg);
- break;
- case DEBUG:
- LOG.debug(msg);
- break;
- case INFO:
- LOG.info(msg);
- break;
- case WARN:
- LOG.warn(msg);
- break;
- case ERROR:
- LOG.error(msg);
- _collector.reportError(new ReportedFailedException(msg));
- break;
- default:
- LOG.info(msg);
- break;
- }
- }
-
- private void handleMetrics(ShellMsg shellMsg) {
- //get metric name
- String name = shellMsg.getMetricName();
- if (name.isEmpty()) {
- throw new RuntimeException("Receive Metrics name is empty");
- }
-
- //get metric by name
- IMetric iMetric = _context.getRegisteredMetricByName(name);
- if (iMetric == null) {
- throw new RuntimeException("Could not find metric by name["+name+"] ");
- }
- if ( !(iMetric instanceof IShellMetric)) {
- throw new RuntimeException("Metric["+name+"] is not IShellMetric, can not call by RPC");
- }
- IShellMetric iShellMetric = (IShellMetric)iMetric;
-
- //call updateMetricFromRPC with params
- Object paramsObj = shellMsg.getMetricParams();
- try {
- iShellMetric.updateMetricFromRPC(paramsObj);
- } catch (RuntimeException re) {
- throw re;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private void setHeartbeat() {
- lastHeartbeatTimestamp.set(System.currentTimeMillis());
- }
-
- private long getLastHeartbeat() {
- return lastHeartbeatTimestamp.get();
- }
-
- private void die(Throwable exception) {
- String processInfo = _process.getProcessInfoString() + _process.getProcessTerminationInfoString();
- _exception = new RuntimeException(processInfo, exception);
- LOG.error("Halting process: ShellBolt died.", exception);
- _collector.reportError(exception);
- if (_running || (exception instanceof Error)) { //don't exit if not running, unless it is an Error
- System.exit(11);
- }
- }
-
- private class BoltHeartbeatTimerTask extends TimerTask {
- private ShellBolt bolt;
-
- public BoltHeartbeatTimerTask(ShellBolt bolt) {
- this.bolt = bolt;
- }
-
- @Override
- public void run() {
- long currentTimeMillis = System.currentTimeMillis();
- long lastHeartbeat = getLastHeartbeat();
-
- LOG.debug("BOLT - current time : {}, last heartbeat : {}, worker timeout (ms) : {}",
- currentTimeMillis, lastHeartbeat, workerTimeoutMills);
-
- if (currentTimeMillis - lastHeartbeat > workerTimeoutMills) {
- bolt.die(new RuntimeException("subprocess heartbeat timeout"));
- }
-
- sendHeartbeatFlag.compareAndSet(false, true);
- }
-
-
- }
-
- private class BoltReaderRunnable implements Runnable {
- public void run() {
- while (_running) {
- try {
- ShellMsg shellMsg = _process.readShellMsg();
-
- String command = shellMsg.getCommand();
- if (command == null) {
- throw new IllegalArgumentException("Command not found in bolt message: " + shellMsg);
- }
- if (command.equals("sync")) {
- setHeartbeat();
- } else if(command.equals("ack")) {
- handleAck(shellMsg.getId());
- } else if (command.equals("fail")) {
- handleFail(shellMsg.getId());
- } else if (command.equals("error")) {
- handleError(shellMsg.getMsg());
- } else if (command.equals("log")) {
- handleLog(shellMsg);
- } else if (command.equals("emit")) {
- handleEmit(shellMsg);
- } else if (command.equals("metrics")) {
- handleMetrics(shellMsg);
- }
- } catch (InterruptedException e) {
- } catch (Throwable t) {
- die(t);
- }
- }
- }
- }
-
- private class BoltWriterRunnable implements Runnable {
- public void run() {
- while (_running) {
- try {
- if (sendHeartbeatFlag.get()) {
- LOG.debug("BOLT - sending heartbeat request to subprocess");
-
- String genId = Long.toString(_rand.nextLong());
- _process.writeBoltMsg(createHeartbeatBoltMessage(genId));
- sendHeartbeatFlag.compareAndSet(true, false);
- }
-
- Object write = _pendingWrites.poll(1, SECONDS);
- if (write instanceof BoltMsg) {
- _process.writeBoltMsg((BoltMsg) write);
- } else if (write instanceof List<?>) {
- _process.writeTaskIds((List<Integer>)write);
- } else if (write != null) {
- throw new RuntimeException("Unknown class type to write: " + write.getClass().getName());
- }
- } catch (InterruptedException e) {
- } catch (Throwable t) {
- die(t);
- }
- }
- }
-
- private BoltMsg createHeartbeatBoltMessage(String genId) {
- BoltMsg msg = new BoltMsg();
- msg.setId(genId);
- msg.setTask(Constants.SYSTEM_TASK_ID);
- msg.setStream(HEARTBEAT_STREAM_ID);
- msg.setTuple(new ArrayList<Object>());
- return msg;
- }
- }
-}
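The Javadoc above shows how a ShellBolt subclass declares its command and output fields; the sketch below wires such a subclass into a topology. The spout id, script name, parallelism and FeederSpout fields are illustrative assumptions, not part of this code; splitsentence.py would have to sit under resources/ in the topology jar as described above.

    import java.util.Map;

    import backtype.storm.task.ShellBolt;
    import backtype.storm.testing.FeederSpout;
    import backtype.storm.topology.IRichBolt;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.tuple.Fields;

    public class ShellBoltWiringSketch {

        // same shape as the MyBolt example in the Javadoc above
        public static class SplitSentence extends ShellBolt implements IRichBolt {
            public SplitSentence() {
                super("python", "splitsentence.py");
            }

            public void declareOutputFields(OutputFieldsDeclarer declarer) {
                declarer.declare(new Fields("word"));
            }

            public Map<String, Object> getComponentConfiguration() {
                return null;
            }
        }

        public static void main(String[] args) {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("sentences", new FeederSpout(new Fields("sentence")), 1);
            builder.setBolt("split", new SplitSentence(), 2).shuffleGrouping("sentences");
            // builder.createTopology() can then be submitted via StormSubmitter
            // or run in a LocalCluster for testing.
        }
    }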
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/TopologyContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/TopologyContext.java b/jstorm-client/src/main/java/backtype/storm/task/TopologyContext.java
deleted file mode 100644
index 34ef4fa..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/TopologyContext.java
+++ /dev/null
@@ -1,317 +0,0 @@
-package backtype.storm.task;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.hooks.ITaskHook;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.metric.api.IReducer;
-import backtype.storm.metric.api.ICombiner;
-import backtype.storm.metric.api.ReducedMetric;
-import backtype.storm.metric.api.CombinedMetric;
-import backtype.storm.state.ISubscribedState;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.apache.commons.lang.NotImplementedException;
-
-/**
- * A TopologyContext is given to bolts and spouts in their "prepare" and "open"
- * methods, respectively. This object provides information about the component's
- * place within the topology, such as task ids, inputs and outputs, etc.
- *
- * <p>
- * The TopologyContext is also used to declare ISubscribedState objects to
- * synchronize state with StateSpouts this object is subscribed to.
- * </p>
- */
-public class TopologyContext extends WorkerTopologyContext implements
- IMetricsContext {
- private Integer _taskId;
- private Map<String, Object> _taskData = new HashMap<String, Object>();
- private List<ITaskHook> _hooks = new ArrayList<ITaskHook>();
- private Map<String, Object> _executorData;
- private Map<Integer, Map<Integer, Map<String, IMetric>>> _registeredMetrics;
- private clojure.lang.Atom _openOrPrepareWasCalled;
-
- public TopologyContext(StormTopology topology, Map stormConf,
- Map<Integer, String> taskToComponent,
- Map<String, List<Integer>> componentToSortedTasks,
- Map<String, Map<String, Fields>> componentToStreamToFields,
- String topologyId, String codeDir, String pidDir, Integer taskId,
- Integer workerPort, List<Integer> workerTasks,
- Map<String, Object> defaultResources,
- Map<String, Object> userResources,
- Map<String, Object> executorData, Map registeredMetrics,
- clojure.lang.Atom openOrPrepareWasCalled) {
- super(topology, stormConf, taskToComponent, componentToSortedTasks,
- componentToStreamToFields, topologyId, codeDir, pidDir,
- workerPort, workerTasks, defaultResources, userResources);
- _taskId = taskId;
- _executorData = executorData;
- _registeredMetrics = registeredMetrics;
- _openOrPrepareWasCalled = openOrPrepareWasCalled;
- }
-
- /**
- * All state from all subscribed state spouts streams will be synced with
- * the provided object.
- *
- * <p>
- * It is recommended that your ISubscribedState object is kept as an
- * instance variable of this object. The recommended usage of this method is
- * as follows:
- * </p>
- *
- * <p>
- * _myState = context.setAllSubscribedState(new MyState());
- * </p>
- *
- * @param obj
- * Provided ISubscribedState implementation
- * @return Returns the ISubscribedState object provided
- */
- public <T extends ISubscribedState> T setAllSubscribedState(T obj) {
- // check that only subscribed to one component/stream for statespout
- // setsubscribedstate appropriately
- throw new NotImplementedException();
- }
-
- /**
- * Synchronizes the default stream from the specified state spout component
- * id with the provided ISubscribedState object.
- *
- * <p>
- * The recommended usage of this method is as follows:
- * </p>
- * <p>
- * _myState = context.setSubscribedState(componentId, new MyState());
- * </p>
- *
- * @param componentId
- * the id of the StateSpout component to subscribe to
- * @param obj
- * Provided ISubscribedState implementation
- * @return Returns the ISubscribedState object provided
- */
- public <T extends ISubscribedState> T setSubscribedState(
- String componentId, T obj) {
- return setSubscribedState(componentId, Utils.DEFAULT_STREAM_ID, obj);
- }
-
- /**
- * Synchronizes the specified stream from the specified state spout
- * component id with the provided ISubscribedState object.
- *
- * <p>
- * The recommended usage of this method is as follows:
- * </p>
- * <p>
- * _myState = context.setSubscribedState(componentId, streamId, new
- * MyState());
- * </p>
- *
- * @param componentId
- * the id of the StateSpout component to subscribe to
- * @param streamId
- * the stream to subscribe to
- * @param obj
- * Provided ISubscribedState implementation
- * @return Returns the ISubscribedState object provided
- */
- public <T extends ISubscribedState> T setSubscribedState(
- String componentId, String streamId, T obj) {
- throw new NotImplementedException();
- }
-
- /**
- * Gets the task id of this task.
- *
- * @return the task id
- */
- public int getThisTaskId() {
- return _taskId;
- }
-
- /**
- * Gets the component id for this task. The component id maps to a component
- * id specified for a Spout or Bolt in the topology definition.
- *
- * @return the component id of this task
- */
- public String getThisComponentId() {
- return getComponentId(_taskId);
- }
-
- /**
- * Gets the declared output fields for the specified stream id for the
- * component this task is a part of.
- */
- public Fields getThisOutputFields(String streamId) {
- return getComponentOutputFields(getThisComponentId(), streamId);
- }
-
- /**
- * Gets the set of streams declared for the component of this task.
- */
- public Set<String> getThisStreams() {
- return getComponentStreams(getThisComponentId());
- }
-
- /**
- * Gets the index of this task id in
- * getComponentTasks(getThisComponentId()). An example use case for this
- * method is determining which task should own which shard of a distributed
- * resource, so that the shards are assigned evenly across tasks.
- */
- public int getThisTaskIndex() {
- List<Integer> tasks = new ArrayList<Integer>(
- getComponentTasks(getThisComponentId()));
- Collections.sort(tasks);
- for (int i = 0; i < tasks.size(); i++) {
- if (tasks.get(i) == getThisTaskId()) {
- return i;
- }
- }
- throw new RuntimeException(
- "Fatal: could not find this task id in this component");
- }
-
- /**
- * Gets the declared inputs to this component.
- *
- * @return A map from subscribed component/stream to the grouping subscribed
- * with.
- */
- public Map<GlobalStreamId, Grouping> getThisSources() {
- return getSources(getThisComponentId());
- }
-
- /**
- * Gets information about who is consuming the outputs of this component,
- * and how.
- *
- * @return Map from stream id to component id to the Grouping used.
- */
- public Map<String, Map<String, Grouping>> getThisTargets() {
- return getTargets(getThisComponentId());
- }
-
- public void setTaskData(String name, Object data) {
- _taskData.put(name, data);
- }
-
- public Object getTaskData(String name) {
- return _taskData.get(name);
- }
-
- public void setExecutorData(String name, Object data) {
- _executorData.put(name, data);
- }
-
- public Object getExecutorData(String name) {
- return _executorData.get(name);
- }
-
- public void addTaskHook(ITaskHook hook) {
- hook.prepare(_stormConf, this);
- _hooks.add(hook);
- }
-
- public Collection<ITaskHook> getHooks() {
- return _hooks;
- }
-
- /**
- * Registers an IMetric instance. Storm will then call getValueAndReset on the
- * metric every timeBucketSizeInSecs and the returned value is sent to all
- * metrics consumers. You must call this during IBolt::prepare or
- * ISpout::open.
- *
- * @return The IMetric argument unchanged.
- */
- public <T extends IMetric> T registerMetric(String name, T metric,
- int timeBucketSizeInSecs) {
- if ((Boolean) _openOrPrepareWasCalled.deref() == true) {
- throw new RuntimeException(
- "TopologyContext.registerMetric can only be called from within overridden "
- + "IBolt::prepare() or ISpout::open() method.");
- }
-
- if (metric == null) {
- throw new IllegalArgumentException("Cannot register a null metric");
- }
-
- if (timeBucketSizeInSecs <= 0) {
- throw new IllegalArgumentException("TopologyContext.registerMetric can only be called with timeBucketSizeInSecs " +
- "greater than or equal to 1 second.");
- }
-
- if (getRegisteredMetricByName(name) != null) {
- throw new RuntimeException("The same metric name `" + name + "` was registered twice." );
- }
-
- Map m1 = _registeredMetrics;
- if (!m1.containsKey(timeBucketSizeInSecs)) {
- m1.put(timeBucketSizeInSecs, new HashMap());
- }
-
- Map m2 = (Map) m1.get(timeBucketSizeInSecs);
- if (!m2.containsKey(_taskId)) {
- m2.put(_taskId, new HashMap());
- }
-
- Map m3 = (Map) m2.get(_taskId);
- if (m3.containsKey(name)) {
- throw new RuntimeException("The same metric name `" + name
- + "` was registered twice.");
- } else {
- m3.put(name, metric);
- }
-
- return metric;
- }
-
- /**
- * Gets the component's registered metric by name.
- * Note: normally a component can register a given metric name only once,
- * but registerMetric currently has a bug (https://issues.apache.org/jira/browse/STORM-254)
- * that allows the same metric name to be registered twice,
- * so we simply return the first matching metric found.
- */
- public IMetric getRegisteredMetricByName(String name) {
- IMetric metric = null;
-
- for (Map<Integer, Map<String, IMetric>> taskIdToNameToMetric: _registeredMetrics.values()) {
- Map<String, IMetric> nameToMetric = taskIdToNameToMetric.get(_taskId);
- if (nameToMetric != null) {
- metric = nameToMetric.get(name);
- if (metric != null) {
- //we just return the first metric we meet
- break;
- }
- }
- }
-
- return metric;
- }
-
- /*
- * Convenience method for registering a ReducedMetric.
- */
- public ReducedMetric registerMetric(String name, IReducer reducer, int timeBucketSizeInSecs) {
- return registerMetric(name, new ReducedMetric(reducer), timeBucketSizeInSecs);
- }
- /*
- * Convenience method for registering a CombinedMetric.
- */
- public CombinedMetric registerMetric(String name, ICombiner combiner, int timeBucketSizeInSecs) {
- return registerMetric(name, new CombinedMetric(combiner), timeBucketSizeInSecs);
- }
-}
\ No newline at end of file
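Following the registerMetric contract described above (it must be called from IBolt::prepare or ISpout::open, with a bucket size of at least one second), here is a minimal sketch of a bolt that registers and updates a CountMetric. The metric name, bucket size and class names are illustrative assumptions.

    import java.util.Map;

    import backtype.storm.metric.api.CountMetric;
    import backtype.storm.task.OutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichBolt;
    import backtype.storm.tuple.Tuple;

    public class MetricRegisteringBolt extends BaseRichBolt {
        private transient CountMetric tupleCount;
        private OutputCollector collector;

        @Override
        public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
            // registerMetric must be called here (or in ISpout.open); Storm then calls
            // getValueAndReset() every 60 seconds and forwards the value to the
            // configured metrics consumers.
            this.tupleCount = context.registerMetric("tuple_count", new CountMetric(), 60);
        }

        @Override
        public void execute(Tuple input) {
            tupleCount.incr();
            collector.ack(input);
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // no output streams
        }
    }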
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/WorkerTopologyContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/WorkerTopologyContext.java b/jstorm-client/src/main/java/backtype/storm/task/WorkerTopologyContext.java
deleted file mode 100644
index 42e88dc..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/WorkerTopologyContext.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package backtype.storm.task;
-
-import backtype.storm.generated.StormTopology;
-import backtype.storm.tuple.Fields;
-import java.io.File;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ExecutorService;
-
-public class WorkerTopologyContext extends GeneralTopologyContext {
- public static final String SHARED_EXECUTOR = "executor";
-
- private Integer _workerPort;
- private List<Integer> _workerTasks;
- private String _codeDir;
- private String _pidDir;
- Map<String, Object> _userResources;
- Map<String, Object> _defaultResources;
-
- public WorkerTopologyContext(StormTopology topology, Map stormConf,
- Map<Integer, String> taskToComponent,
- Map<String, List<Integer>> componentToSortedTasks,
- Map<String, Map<String, Fields>> componentToStreamToFields,
- String topologyId, String codeDir, String pidDir, Integer workerPort,
- List<Integer> workerTasks, Map<String, Object> defaultResources,
- Map<String, Object> userResources) {
- super(topology, stormConf, taskToComponent, componentToSortedTasks,
- componentToStreamToFields, topologyId);
- _codeDir = codeDir;
- _defaultResources = defaultResources;
- _userResources = userResources;
- try {
- if (pidDir != null) {
- _pidDir = new File(pidDir).getCanonicalPath();
- } else {
- _pidDir = null;
- }
- } catch (IOException e) {
- throw new RuntimeException("Could not get canonical path for "
- + pidDir, e);
- }
- _workerPort = workerPort;
- _workerTasks = workerTasks;
- }
-
- /**
- * Gets all the task ids that are running in this worker process (including
- * the task for this task).
- */
- public List<Integer> getThisWorkerTasks() {
- return _workerTasks;
- }
-
- public Integer getThisWorkerPort() {
- return _workerPort;
- }
-
- /**
- * Gets the location of the external resources for this worker on the local
- * filesystem. These external resources typically include bolts implemented
- * in other languages, such as Ruby or Python.
- */
- public String getCodeDir() {
- return _codeDir;
- }
-
- /**
- * If this task spawns any subprocesses, those subprocesses must immediately
- * write their PID to this directory on the local filesystem to ensure that
- * Storm properly destroys that process when the worker is shutdown.
- */
- public String getPIDDir() {
- return _pidDir;
- }
-
- public Object getResource(String name) {
- return _userResources.get(name);
- }
-
- public ExecutorService getSharedExecutor() {
- return (ExecutorService) _defaultResources.get(SHARED_EXECUTOR);
- }
-}
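getPIDDir's contract above says any spawned subprocess must record its PID so the worker can clean it up on shutdown. The sketch below shows one way a component might honour that, writing an empty file named after the PID, which is the convention Storm's own multilang support appears to follow; the helper name and how the PID is obtained are assumptions for illustration.

    import java.io.File;
    import java.io.IOException;

    import backtype.storm.task.WorkerTopologyContext;

    public class PidDirSketch {
        // record a spawned subprocess so Storm can kill it when the worker shuts down
        static void recordSubprocessPid(WorkerTopologyContext context, long pid) throws IOException {
            File pidFile = new File(context.getPIDDir(), Long.toString(pid));
            if (!pidFile.createNewFile() && !pidFile.exists()) {
                throw new IOException("Could not create PID file " + pidFile);
            }
        }
    }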
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/AckFailDelegate.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/AckFailDelegate.java b/jstorm-client/src/main/java/backtype/storm/testing/AckFailDelegate.java
deleted file mode 100644
index 131dee7..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/AckFailDelegate.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.testing;
-
-import java.io.Serializable;
-
-public interface AckFailDelegate extends Serializable {
- public void ack(Object id);
-
- public void fail(Object id);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/AckFailMapTracker.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/AckFailMapTracker.java b/jstorm-client/src/main/java/backtype/storm/testing/AckFailMapTracker.java
deleted file mode 100644
index 68d334d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/AckFailMapTracker.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.utils.RegisteredGlobalState;
-import java.util.HashSet;
-import java.util.Set;
-
-public class AckFailMapTracker implements AckFailDelegate {
-
- String _acked;
- String _failed;
-
- public AckFailMapTracker() {
- _acked = RegisteredGlobalState.registerState(new HashSet());
- _failed = RegisteredGlobalState.registerState(new HashSet());
- }
-
- public boolean isAcked(Object id) {
- return ((Set) RegisteredGlobalState.getState(_acked)).contains(id);
- }
-
- public boolean isFailed(Object id) {
- return ((Set) RegisteredGlobalState.getState(_failed)).contains(id);
- }
-
- @Override
- public void ack(Object id) {
- ((Set) RegisteredGlobalState.getState(_acked)).add(id);
- }
-
- @Override
- public void fail(Object id) {
- ((Set) RegisteredGlobalState.getState(_failed)).add(id);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/AckTracker.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/AckTracker.java b/jstorm-client/src/main/java/backtype/storm/testing/AckTracker.java
deleted file mode 100644
index 134f8f0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/AckTracker.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package backtype.storm.testing;
-
-import java.util.Map;
-import java.util.UUID;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class AckTracker implements AckFailDelegate {
- private static Map<String, AtomicInteger> acks = new ConcurrentHashMap<String, AtomicInteger>();
-
- private String _id;
-
- public AckTracker() {
- _id = UUID.randomUUID().toString();
- acks.put(_id, new AtomicInteger(0));
- }
-
- @Override
- public void ack(Object id) {
- acks.get(_id).incrementAndGet();
- }
-
- @Override
- public void fail(Object id) {
- }
-
- public int getNumAcks() {
- return acks.get(_id).intValue();
- }
-
- public void resetNumAcks() {
- acks.get(_id).set(0);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/BatchNumberList.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/BatchNumberList.java b/jstorm-client/src/main/java/backtype/storm/testing/BatchNumberList.java
deleted file mode 100644
index dd6530e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/BatchNumberList.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBatchBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-
-public class BatchNumberList extends BaseBatchBolt {
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("word", "list"));
- }
-
- String _wordComponent;
-
- public BatchNumberList(String wordComponent) {
- _wordComponent = wordComponent;
- }
-
- String word = null;
- List<Integer> intSet = new ArrayList<Integer>();
- BatchOutputCollector _collector;
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- BatchOutputCollector collector, Object id) {
- _collector = collector;
- }
-
- @Override
- public void execute(Tuple tuple) {
- if (tuple.getSourceComponent().equals(_wordComponent)) {
- this.word = tuple.getString(1);
- } else {
- intSet.add(tuple.getInteger(1));
- }
- }
-
- @Override
- public void finishBatch() {
- if (word != null) {
- Collections.sort(intSet);
- _collector.emit(new Values(word, intSet));
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/BatchProcessWord.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/BatchProcessWord.java b/jstorm-client/src/main/java/backtype/storm/testing/BatchProcessWord.java
deleted file mode 100644
index 21f316f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/BatchProcessWord.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-public class BatchProcessWord extends BaseBasicBolt {
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("id", "size"));
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- collector.emit(new Values(input.getValue(0), input.getString(1)
- .length()));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/BatchRepeatA.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/BatchRepeatA.java b/jstorm-client/src/main/java/backtype/storm/testing/BatchRepeatA.java
deleted file mode 100644
index 9cb06c3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/BatchRepeatA.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-public class BatchRepeatA extends BaseBasicBolt {
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- Object id = input.getValue(0);
- String word = input.getString(1);
- for (int i = 0; i < word.length(); i++) {
- if (word.charAt(i) == 'a') {
- collector.emit("multi", new Values(id, word.substring(0, i)));
- }
- }
- collector.emit("single", new Values(id, word));
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream("multi", new Fields("id", "word"));
- declarer.declareStream("single", new Fields("id", "word"));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/BoltTracker.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/BoltTracker.java b/jstorm-client/src/main/java/backtype/storm/testing/BoltTracker.java
deleted file mode 100644
index fb928c8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/BoltTracker.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import java.util.HashMap;
-import java.util.Map;
-
-public class BoltTracker extends NonRichBoltTracker implements IRichBolt {
- IRichBolt _richDelegate;
-
- public BoltTracker(IRichBolt delegate, String id) {
- super(delegate, id);
- _richDelegate = delegate;
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _richDelegate.declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return new HashMap<String, Object>();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/CompleteTopologyParam.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/CompleteTopologyParam.java b/jstorm-client/src/main/java/backtype/storm/testing/CompleteTopologyParam.java
deleted file mode 100644
index 4017ddb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/CompleteTopologyParam.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.Config;
-
-/**
- * The param class for the <code>Testing.completeTopology</code>.
- */
-public class CompleteTopologyParam {
- /**
- * The mocked spout sources
- */
- private MockedSources mockedSources;
- /**
- * the config for the topology when it was submitted to the cluster
- */
- private Config stormConf;
- /**
- * whether to clean up the state after the topology completes
- */
- private Boolean cleanupState;
- /**
- * the topology name you want to submit to the cluster
- */
- private String topologyName;
-
- public MockedSources getMockedSources() {
- return mockedSources;
- }
-
- public void setMockedSources(MockedSources mockedSources) {
- this.mockedSources = mockedSources;
- }
-
- public Config getStormConf() {
- return stormConf;
- }
-
- public void setStormConf(Config stormConf) {
- this.stormConf = stormConf;
- }
-
- public Boolean getCleanupState() {
- return cleanupState;
- }
-
- public void setCleanupState(Boolean cleanupState) {
- this.cleanupState = cleanupState;
- }
-
- public String getTopologyName() {
- return topologyName;
- }
-
- public void setTopologyName(String topologyName) {
- this.topologyName = topologyName;
- }
-}
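As a sketch of how these setters are typically combined before handing the param object to Testing.completeTopology: the spout id, mock tuples, worker count and topology name below are illustrative, and MockedSources.addMockData(spoutId, values...) is assumed as the way mock tuples are fed to the spout under test.

    import backtype.storm.Config;
    import backtype.storm.testing.CompleteTopologyParam;
    import backtype.storm.testing.MockedSources;
    import backtype.storm.tuple.Values;

    public class CompleteTopologyParamSketch {
        public static CompleteTopologyParam buildParam() {
            MockedSources mocked = new MockedSources();
            // replace the real spout output with fixed test tuples
            mocked.addMockData("word-spout", new Values("nathan"), new Values("bob"));

            Config conf = new Config();
            conf.setNumWorkers(2);

            CompleteTopologyParam param = new CompleteTopologyParam();
            param.setMockedSources(mocked);
            param.setStormConf(conf);
            param.setTopologyName("word-count-test");
            param.setCleanupState(true);
            // Testing.completeTopology(cluster, topology, param) then runs the topology
            // to completion and returns the tuples emitted on each stream.
            return param;
        }
    }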
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/CountingBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/CountingBatchBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/CountingBatchBolt.java
deleted file mode 100644
index 75dca1a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/CountingBatchBolt.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBatchBolt;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.Map;
-
-public class CountingBatchBolt extends BaseBatchBolt {
- BatchOutputCollector _collector;
- Object _id;
- int _count = 0;
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- BatchOutputCollector collector, Object id) {
- _collector = collector;
- _id = id;
- }
-
- @Override
- public void execute(Tuple tuple) {
- _count++;
- }
-
- @Override
- public void finishBatch() {
- _collector.emit(new Values(_id, _count));
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("tx", "count"));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/CountingCommitBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/CountingCommitBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/CountingCommitBolt.java
deleted file mode 100644
index e3533a6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/CountingCommitBolt.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.topology.base.BaseTransactionalBolt;
-import backtype.storm.transactional.ICommitter;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.Map;
-
-public class CountingCommitBolt extends BaseTransactionalBolt implements
- ICommitter {
- BatchOutputCollector _collector;
- TransactionAttempt _id;
- int _count = 0;
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- BatchOutputCollector collector, TransactionAttempt id) {
- _id = id;
- _collector = collector;
- }
-
- @Override
- public void execute(Tuple tuple) {
- _count++;
- }
-
- @Override
- public void finishBatch() {
- _collector.emit(new Values(_id, _count));
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("tx", "count"));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/FeederSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/FeederSpout.java b/jstorm-client/src/main/java/backtype/storm/testing/FeederSpout.java
deleted file mode 100644
index 871e573..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/FeederSpout.java
+++ /dev/null
@@ -1,83 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.OutputFieldsDeclarer;
-import java.util.Map;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.InprocMessaging;
-import java.util.HashMap;
-import java.util.List;
-import java.util.UUID;
-
-public class FeederSpout extends BaseRichSpout {
- private int _id;
- private Fields _outFields;
- private SpoutOutputCollector _collector;
- private AckFailDelegate _ackFailDelegate;
-
- public FeederSpout(Fields outFields) {
- _id = InprocMessaging.acquireNewPort();
- _outFields = outFields;
- }
-
- public void setAckFailDelegate(AckFailDelegate d) {
- _ackFailDelegate = d;
- }
-
- public void feed(List<Object> tuple) {
- feed(tuple, UUID.randomUUID().toString());
- }
-
- public void feed(List<Object> tuple, Object msgId) {
- InprocMessaging.sendMessage(_id, new Values(tuple, msgId));
- }
-
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- _collector = collector;
- }
-
- public void close() {
-
- }
-
- public void nextTuple() {
- List<Object> toEmit = (List<Object>) InprocMessaging.pollMessage(_id);
- if (toEmit != null) {
- List<Object> tuple = (List<Object>) toEmit.get(0);
- Object msgId = toEmit.get(1);
-
- _collector.emit(tuple, msgId);
- } else {
- try {
- Thread.sleep(1);
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
- }
- }
-
- public void ack(Object msgId) {
- if (_ackFailDelegate != null) {
- _ackFailDelegate.ack(msgId);
- }
- }
-
- public void fail(Object msgId) {
- if (_ackFailDelegate != null) {
- _ackFailDelegate.fail(msgId);
- }
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(_outFields);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return new HashMap<String, Object>();
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/FixedTuple.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/FixedTuple.java b/jstorm-client/src/main/java/backtype/storm/testing/FixedTuple.java
deleted file mode 100644
index e4cc089..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/FixedTuple.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.utils.Utils;
-import java.io.Serializable;
-import java.util.List;
-
-public class FixedTuple implements Serializable {
- public String stream;
- public List<Object> values;
-
- public FixedTuple(List<Object> values) {
- this.stream = Utils.DEFAULT_STREAM_ID;
- this.values = values;
- }
-
- public FixedTuple(String stream, List<Object> values) {
- this.stream = stream;
- this.values = values;
- }
-
- @Override
- public String toString() {
- return stream + ":" + "<" + values.toString() + ">";
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/FixedTupleSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/FixedTupleSpout.java b/jstorm-client/src/main/java/backtype/storm/testing/FixedTupleSpout.java
deleted file mode 100644
index e463df0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/FixedTupleSpout.java
+++ /dev/null
@@ -1,164 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-import static backtype.storm.utils.Utils.get;
-
-public class FixedTupleSpout implements IRichSpout {
- private static final Map<String, Integer> acked = new HashMap<String, Integer>();
- private static final Map<String, Integer> failed = new HashMap<String, Integer>();
-
- public static int getNumAcked(String stormId) {
- synchronized (acked) {
- return get(acked, stormId, 0);
- }
- }
-
- public static int getNumFailed(String stormId) {
- synchronized (failed) {
- return get(failed, stormId, 0);
- }
- }
-
- public static void clear(String stormId) {
- acked.remove(stormId);
- failed.remove(stormId);
- }
-
- private List<FixedTuple> _tuples;
- private SpoutOutputCollector _collector;
-
- private TopologyContext _context;
- private List<FixedTuple> _serveTuples;
- private Map<String, FixedTuple> _pending;
-
- private String _id;
- private String _fieldName;
-
- public FixedTupleSpout(List tuples) {
- this(tuples, null);
- }
-
- public FixedTupleSpout(List tuples, String fieldName) {
- _id = UUID.randomUUID().toString();
- synchronized (acked) {
- acked.put(_id, 0);
- }
- synchronized (failed) {
- failed.put(_id, 0);
- }
- _tuples = new ArrayList<FixedTuple>();
- for (Object o : tuples) {
- FixedTuple ft;
- if (o instanceof FixedTuple) {
- ft = (FixedTuple) o;
- } else {
- ft = new FixedTuple((List) o);
- }
- _tuples.add(ft);
- }
- _fieldName = fieldName;
- }
-
- public List<FixedTuple> getSourceTuples() {
- return _tuples;
- }
-
- public int getCompleted() {
- int ackedAmt;
- int failedAmt;
-
- synchronized (acked) {
- ackedAmt = acked.get(_id);
- }
- synchronized (failed) {
- failedAmt = failed.get(_id);
- }
- return ackedAmt + failedAmt;
- }
-
- public void cleanup() {
- synchronized (acked) {
- acked.remove(_id);
- }
- synchronized (failed) {
- failed.remove(_id);
- }
- }
-
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- _context = context;
- List<Integer> tasks = context.getComponentTasks(context
- .getThisComponentId());
- int startIndex;
- for (startIndex = 0; startIndex < tasks.size(); startIndex++) {
- if (tasks.get(startIndex) == context.getThisTaskId()) {
- break;
- }
- }
- _collector = collector;
- _pending = new HashMap<String, FixedTuple>();
- _serveTuples = new ArrayList<FixedTuple>();
- for (int i = startIndex; i < _tuples.size(); i += tasks.size()) {
- _serveTuples.add(_tuples.get(i));
- }
- }
-
- public void close() {
- }
-
- public void nextTuple() {
- if (_serveTuples.size() > 0) {
- FixedTuple ft = _serveTuples.remove(0);
- String id = UUID.randomUUID().toString();
- _pending.put(id, ft);
- _collector.emit(ft.stream, ft.values, id);
- } else {
- Utils.sleep(100);
- }
- }
-
- public void ack(Object msgId) {
- synchronized (acked) {
- int curr = get(acked, _id, 0);
- acked.put(_id, curr + 1);
- }
- }
-
- public void fail(Object msgId) {
- synchronized (failed) {
- int curr = get(failed, _id, 0);
- failed.put(_id, curr + 1);
- }
- }
-
- @Override
- public void activate() {
- }
-
- @Override
- public void deactivate() {
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- if (_fieldName != null) {
- declarer.declare(new Fields(_fieldName));
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-}
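
For reference, a minimal sketch of how a fixed-tuple spout such as the one removed above is typically wired into a test topology. The component id, field name, and sample values below are illustrative assumptions, not part of this commit.

    import backtype.storm.testing.FixedTupleSpout;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.tuple.Values;
    import java.util.Arrays;

    public class FixedTupleSpoutSketch {
        public static void main(String[] args) {
            // Each Values object becomes one FixedTuple on the default stream;
            // "word" is the single declared output field (illustrative name).
            FixedTupleSpout spout = new FixedTupleSpout(
                    Arrays.asList(new Values("apple"), new Values("banana"), new Values("cherry")),
                    "word");

            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("fixed-words", spout);
            // A downstream bolt (e.g. the IdentityBolt removed just below) would subscribe to
            // "fixed-words" and receive exactly these tuples, striped across the spout's tasks.
        }
    }
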
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/IdentityBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/IdentityBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/IdentityBolt.java
deleted file mode 100644
index b3f8d87..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/IdentityBolt.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-
-public class IdentityBolt extends BaseBasicBolt {
- Fields _fields;
-
- public IdentityBolt(Fields fields) {
- _fields = fields;
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- collector.emit(input.getValues());
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(_fields);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingBatchBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingBatchBolt.java
deleted file mode 100644
index 58ae380..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingBatchBolt.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBatchBolt;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-import java.util.HashMap;
-import java.util.Map;
-
-public class KeyedCountingBatchBolt extends BaseBatchBolt {
- BatchOutputCollector _collector;
- Object _id;
- Map<Object, Integer> _counts = new HashMap<Object, Integer>();
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- BatchOutputCollector collector, Object id) {
- _collector = collector;
- _id = id;
- }
-
- @Override
- public void execute(Tuple tuple) {
- Object key = tuple.getValue(1);
- int curr = Utils.get(_counts, key, 0);
- _counts.put(key, curr + 1);
- }
-
- @Override
- public void finishBatch() {
- for (Object key : _counts.keySet()) {
- _collector.emit(new Values(_id, key, _counts.get(key)));
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("tx", "key", "count"));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingCommitterBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingCommitterBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingCommitterBolt.java
deleted file mode 100644
index a170130..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/KeyedCountingCommitterBolt.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.transactional.ICommitter;
-
-public class KeyedCountingCommitterBolt extends KeyedCountingBatchBolt
- implements ICommitter {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/KeyedSummingBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/KeyedSummingBatchBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/KeyedSummingBatchBolt.java
deleted file mode 100644
index c12a319..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/KeyedSummingBatchBolt.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBatchBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-import clojure.lang.Numbers;
-import java.util.HashMap;
-import java.util.Map;
-
-public class KeyedSummingBatchBolt extends BaseBatchBolt {
- BatchOutputCollector _collector;
- Object _id;
- Map<Object, Number> _sums = new HashMap<Object, Number>();
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- BatchOutputCollector collector, Object id) {
- _collector = collector;
- _id = id;
- }
-
- @Override
- public void execute(Tuple tuple) {
- Object key = tuple.getValue(1);
- Number curr = Utils.get(_sums, key, 0);
- _sums.put(key, Numbers.add(curr, tuple.getValue(2)));
- }
-
- @Override
- public void finishBatch() {
- for (Object key : _sums.keySet()) {
- _collector.emit(new Values(_id, key, _sums.get(key)));
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("tx", "key", "sum"));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpout.java
deleted file mode 100644
index bf6286f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpout.java
+++ /dev/null
@@ -1,187 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.transactional.partitioned.IPartitionedTransactionalSpout;
-import backtype.storm.transactional.partitioned.IPartitionedTransactionalSpout.Emitter;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.RegisteredGlobalState;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-public class MemoryTransactionalSpout implements
- IPartitionedTransactionalSpout<MemoryTransactionalSpoutMeta> {
- public static String TX_FIELD = MemoryTransactionalSpout.class.getName()
- + "/id";
-
- private String _id;
- private String _finishedPartitionsId;
- private int _takeAmt;
- private Fields _outFields;
- private Map<Integer, List<List<Object>>> _initialPartitions;
-
- public MemoryTransactionalSpout(
- Map<Integer, List<List<Object>>> partitions, Fields outFields,
- int takeAmt) {
- _id = RegisteredGlobalState.registerState(partitions);
- Map<Integer, Boolean> finished = Collections
- .synchronizedMap(new HashMap<Integer, Boolean>());
- _finishedPartitionsId = RegisteredGlobalState.registerState(finished);
- _takeAmt = takeAmt;
- _outFields = outFields;
- _initialPartitions = partitions;
- }
-
- public boolean isExhaustedTuples() {
- Map<Integer, Boolean> statuses = getFinishedStatuses();
- for (Integer partition : getQueues().keySet()) {
- if (!statuses.containsKey(partition)
- || !getFinishedStatuses().get(partition)) {
- return false;
- }
- }
- return true;
- }
-
- class Coordinator implements IPartitionedTransactionalSpout.Coordinator {
-
- @Override
- public int numPartitions() {
- return getQueues().size();
- }
-
- @Override
- public boolean isReady() {
- return true;
- }
-
- @Override
- public void close() {
- }
- }
-
- class Emitter
- implements
- IPartitionedTransactionalSpout.Emitter<MemoryTransactionalSpoutMeta> {
-
- Integer _maxSpoutPending;
- Map<Integer, Integer> _emptyPartitions = new HashMap<Integer, Integer>();
-
- public Emitter(Map conf) {
- Object c = conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING);
- if (c == null)
- _maxSpoutPending = 1;
- else
- _maxSpoutPending = Utils.getInt(c);
- }
-
- @Override
- public MemoryTransactionalSpoutMeta emitPartitionBatchNew(
- TransactionAttempt tx, BatchOutputCollector collector,
- int partition, MemoryTransactionalSpoutMeta lastPartitionMeta) {
- int index;
- if (lastPartitionMeta == null) {
- index = 0;
- } else {
- index = lastPartitionMeta.index + lastPartitionMeta.amt;
- }
- List<List<Object>> queue = getQueues().get(partition);
- int total = queue.size();
- int left = total - index;
- int toTake = Math.min(left, _takeAmt);
-
- MemoryTransactionalSpoutMeta ret = new MemoryTransactionalSpoutMeta(
- index, toTake);
- emitPartitionBatch(tx, collector, partition, ret);
- if (toTake == 0) {
- // this is a pretty hacky way to determine when all the
- // partitions have been committed
- // wait until we've emitted max-spout-pending empty partitions
- // for the partition
- int curr = Utils.get(_emptyPartitions, partition, 0) + 1;
- _emptyPartitions.put(partition, curr);
- if (curr > _maxSpoutPending) {
- Map<Integer, Boolean> finishedStatuses = getFinishedStatuses();
- // will be null in remote mode
- if (finishedStatuses != null) {
- finishedStatuses.put(partition, true);
- }
- }
- }
- return ret;
- }
-
- @Override
- public void emitPartitionBatch(TransactionAttempt tx,
- BatchOutputCollector collector, int partition,
- MemoryTransactionalSpoutMeta partitionMeta) {
- List<List<Object>> queue = getQueues().get(partition);
- for (int i = partitionMeta.index; i < partitionMeta.index
- + partitionMeta.amt; i++) {
- List<Object> toEmit = new ArrayList<Object>(queue.get(i));
- toEmit.add(0, tx);
- collector.emit(toEmit);
- }
- }
-
- @Override
- public void close() {
- }
- }
-
- @Override
- public IPartitionedTransactionalSpout.Coordinator getCoordinator(Map conf,
- TopologyContext context) {
- return new Coordinator();
- }
-
- @Override
- public IPartitionedTransactionalSpout.Emitter<MemoryTransactionalSpoutMeta> getEmitter(
- Map conf, TopologyContext context) {
- return new Emitter(conf);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- List<String> toDeclare = new ArrayList<String>(_outFields.toList());
- toDeclare.add(0, TX_FIELD);
- declarer.declare(new Fields(toDeclare));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Config conf = new Config();
- conf.registerSerialization(MemoryTransactionalSpoutMeta.class);
- return conf;
- }
-
- public void startup() {
- getFinishedStatuses().clear();
- }
-
- public void cleanup() {
- RegisteredGlobalState.clearState(_id);
- RegisteredGlobalState.clearState(_finishedPartitionsId);
- }
-
- private Map<Integer, List<List<Object>>> getQueues() {
- Map<Integer, List<List<Object>>> ret = (Map<Integer, List<List<Object>>>) RegisteredGlobalState
- .getState(_id);
- if (ret != null)
- return ret;
- else
- return _initialPartitions;
- }
-
- private Map<Integer, Boolean> getFinishedStatuses() {
- return (Map<Integer, Boolean>) RegisteredGlobalState
- .getState(_finishedPartitionsId);
- }
-}
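
For orientation, a minimal sketch of constructing the in-memory transactional spout deleted above. The partition contents, field names, and batch size are illustrative assumptions.

    import backtype.storm.testing.MemoryTransactionalSpout;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    public class MemoryTransactionalSpoutSketch {
        public static void main(String[] args) {
            // Two partitions of pre-canned tuples; the spout emits at most 3 tuples per batch (takeAmt).
            Map<Integer, List<List<Object>>> partitions = new HashMap<Integer, List<List<Object>>>();
            partitions.put(0, Arrays.<List<Object>>asList(new Values("a", 1), new Values("b", 2)));
            partitions.put(1, Arrays.<List<Object>>asList(new Values("c", 3)));

            MemoryTransactionalSpout spout = new MemoryTransactionalSpout(
                    partitions, new Fields("word", "count"), 3);
            // Declared output fields become (TX_FIELD, "word", "count");
            // a transactional topology builder would register this spout as its batch source.
        }
    }
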
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpoutMeta.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpoutMeta.java b/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpoutMeta.java
deleted file mode 100644
index 4d87a66..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/MemoryTransactionalSpoutMeta.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package backtype.storm.testing;
-
-public class MemoryTransactionalSpoutMeta {
- int index;
- int amt;
-
- // for kryo compatibility
- public MemoryTransactionalSpoutMeta() {
-
- }
-
- public MemoryTransactionalSpoutMeta(int index, int amt) {
- this.index = index;
- this.amt = amt;
- }
-
- @Override
- public String toString() {
- return "index: " + index + "; amt: " + amt;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/MkClusterParam.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/MkClusterParam.java b/jstorm-client/src/main/java/backtype/storm/testing/MkClusterParam.java
deleted file mode 100644
index 8ec7102..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/MkClusterParam.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package backtype.storm.testing;
-
-import java.util.Map;
-
-/**
- * The param arg for <code>Testing.withSimulatedTimeCluster</code> and
- * <code>Testing.withTrackedCluster</code>
- */
-public class MkClusterParam {
- /**
- * count of supervisors for the cluster.
- */
- private Integer supervisors;
- /**
- * count of port for each supervisor
- */
- private Integer portsPerSupervisor;
- /**
- * cluster config
- */
- private Map daemonConf;
-
- public Integer getSupervisors() {
- return supervisors;
- }
-
- public void setSupervisors(Integer supervisors) {
- this.supervisors = supervisors;
- }
-
- public Integer getPortsPerSupervisor() {
- return portsPerSupervisor;
- }
-
- public void setPortsPerSupervisor(Integer portsPerSupervisor) {
- this.portsPerSupervisor = portsPerSupervisor;
- }
-
- public Map getDaemonConf() {
- return daemonConf;
- }
-
- public void setDaemonConf(Map daemonConf) {
- this.daemonConf = daemonConf;
- }
-}
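
A short sketch of how the MkClusterParam bean deleted above is typically populated before being handed to the Testing cluster helpers named in its javadoc. The daemon config key shown is only an example.

    import backtype.storm.Config;
    import backtype.storm.testing.MkClusterParam;

    public class MkClusterParamSketch {
        public static void main(String[] args) {
            Config daemonConf = new Config();
            daemonConf.put(Config.STORM_LOCAL_MODE_ZMQ, false);

            MkClusterParam clusterParam = new MkClusterParam();
            clusterParam.setSupervisors(2);          // two simulated supervisors
            clusterParam.setPortsPerSupervisor(4);   // four worker ports each
            clusterParam.setDaemonConf(daemonConf);
            // clusterParam is then passed, together with a TestJob callback, to the
            // simulated-time / tracked cluster helpers referenced in the class javadoc.
        }
    }
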
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/MkTupleParam.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/MkTupleParam.java b/jstorm-client/src/main/java/backtype/storm/testing/MkTupleParam.java
deleted file mode 100644
index a98704d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/MkTupleParam.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package backtype.storm.testing;
-
-import java.util.ArrayList;
-import java.util.List;
-
-public class MkTupleParam {
- private String stream;
- private String component;
- private List<String> fields;
-
- public String getStream() {
- return stream;
- }
-
- public void setStream(String stream) {
- this.stream = stream;
- }
-
- public String getComponent() {
- return component;
- }
-
- public void setComponent(String component) {
- this.component = component;
- }
-
- public List<String> getFields() {
- return fields;
- }
-
- public void setFields(String... fields) {
- this.fields = new ArrayList<String>();
- for (int i = 0; i < fields.length; i++) {
- this.fields.add(fields[i]);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/MockedSources.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/MockedSources.java b/jstorm-client/src/main/java/backtype/storm/testing/MockedSources.java
deleted file mode 100644
index 6c61edb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/MockedSources.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package backtype.storm.testing;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-
-public class MockedSources {
- /**
- * mocked spout sources for the [spout, stream] pair.
- */
- private Map<String, List<FixedTuple>> data = new HashMap<String, List<FixedTuple>>();
-
- /**
- * add mock data for the spout.
- *
- * @param spoutId
- * the spout to be mocked
- * @param streamId
- * the stream of the spout to be mocked
- * @param objects
- * the mocked data
- */
- public void addMockData(String spoutId, String streamId, Values... valueses) {
- if (!data.containsKey(spoutId)) {
- data.put(spoutId, new ArrayList<FixedTuple>());
- }
-
- List<FixedTuple> tuples = data.get(spoutId);
- for (int i = 0; i < valueses.length; i++) {
- FixedTuple tuple = new FixedTuple(streamId, valueses[i]);
- tuples.add(tuple);
- }
- }
-
- public void addMockData(String spoutId, Values... valueses) {
- this.addMockData(spoutId, Utils.DEFAULT_STREAM_ID, valueses);
- }
-
- public Map<String, List<FixedTuple>> getData() {
- return this.data;
- }
-}
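
A minimal sketch of feeding mocked tuples into a topology via the MockedSources class deleted above. The spout id and sample values are illustrative.

    import backtype.storm.testing.MockedSources;
    import backtype.storm.tuple.Values;
    import backtype.storm.utils.Utils;

    public class MockedSourcesSketch {
        public static void main(String[] args) {
            MockedSources mocked = new MockedSources();
            // Replace the output of spout "word-spout" on the default stream with two fixed tuples.
            mocked.addMockData("word-spout", new Values("hello"), new Values("world"));
            // Or target an explicit stream id.
            mocked.addMockData("word-spout", Utils.DEFAULT_STREAM_ID, new Values("again"));
            // getData() returns the spout-id -> FixedTuple list map consumed by the test harness.
            System.out.println(mocked.getData());
        }
    }
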
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/NGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/NGrouping.java b/jstorm-client/src/main/java/backtype/storm/testing/NGrouping.java
deleted file mode 100644
index 9954b06..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/NGrouping.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.task.WorkerTopologyContext;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
-public class NGrouping implements CustomStreamGrouping {
- int _n;
- List<Integer> _outTasks;
-
- public NGrouping(int n) {
- _n = n;
- }
-
- @Override
- public void prepare(WorkerTopologyContext context, GlobalStreamId stream,
- List<Integer> targetTasks) {
- targetTasks = new ArrayList<Integer>(targetTasks);
- Collections.sort(targetTasks);
- _outTasks = new ArrayList<Integer>();
- for (int i = 0; i < _n; i++) {
- _outTasks.add(targetTasks.get(i));
- }
- }
-
- @Override
- public List<Integer> chooseTasks(int taskId, List<Object> values) {
- return _outTasks;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/NonRichBoltTracker.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/NonRichBoltTracker.java b/jstorm-client/src/main/java/backtype/storm/testing/NonRichBoltTracker.java
deleted file mode 100644
index c5918ad..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/NonRichBoltTracker.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.IBolt;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.RegisteredGlobalState;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class NonRichBoltTracker implements IBolt {
- IBolt _delegate;
- String _trackId;
-
- public NonRichBoltTracker(IBolt delegate, String id) {
- _delegate = delegate;
- _trackId = id;
- }
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- _delegate.prepare(stormConf, context, collector);
- }
-
- public void execute(Tuple input) {
- _delegate.execute(input);
- Map stats = (Map) RegisteredGlobalState.getState(_trackId);
- ((AtomicInteger) stats.get("processed")).incrementAndGet();
- }
-
- public void cleanup() {
- _delegate.cleanup();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/OpaqueMemoryTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/OpaqueMemoryTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/testing/OpaqueMemoryTransactionalSpout.java
deleted file mode 100644
index 371c622..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/OpaqueMemoryTransactionalSpout.java
+++ /dev/null
@@ -1,190 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.transactional.partitioned.IOpaquePartitionedTransactionalSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.RegisteredGlobalState;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * This spout only works in local mode.
- */
-public class OpaqueMemoryTransactionalSpout implements
- IOpaquePartitionedTransactionalSpout<MemoryTransactionalSpoutMeta> {
- public static String TX_FIELD = MemoryTransactionalSpout.class.getName()
- + "/id";
-
- private String _id;
- private String _finishedPartitionsId;
- private String _disabledId;
- private int _takeAmt;
- private Fields _outFields;
-
- public OpaqueMemoryTransactionalSpout(
- Map<Integer, List<List<Object>>> partitions, Fields outFields,
- int takeAmt) {
- _id = RegisteredGlobalState.registerState(partitions);
-
- Map<Integer, Boolean> finished = Collections
- .synchronizedMap(new HashMap<Integer, Boolean>());
- _finishedPartitionsId = RegisteredGlobalState.registerState(finished);
-
- Map<Integer, Boolean> disabled = Collections
- .synchronizedMap(new HashMap<Integer, Boolean>());
- _disabledId = RegisteredGlobalState.registerState(disabled);
-
- _takeAmt = takeAmt;
- _outFields = outFields;
- }
-
- public void setDisabled(Integer partition, boolean disabled) {
- getDisabledStatuses().put(partition, disabled);
- }
-
- public boolean isExhaustedTuples() {
- Map<Integer, Boolean> statuses = getFinishedStatuses();
- for (Integer partition : getQueues().keySet()) {
- if (!statuses.containsKey(partition)
- || !getFinishedStatuses().get(partition)) {
- return false;
- }
- }
- return true;
- }
-
- @Override
- public IOpaquePartitionedTransactionalSpout.Emitter<MemoryTransactionalSpoutMeta> getEmitter(
- Map conf, TopologyContext context) {
- return new Emitter(conf);
- }
-
- @Override
- public IOpaquePartitionedTransactionalSpout.Coordinator getCoordinator(
- Map conf, TopologyContext context) {
- return new Coordinator();
- }
-
- class Coordinator implements
- IOpaquePartitionedTransactionalSpout.Coordinator {
- @Override
- public boolean isReady() {
- return true;
- }
-
- @Override
- public void close() {
- }
- }
-
- class Emitter
- implements
- IOpaquePartitionedTransactionalSpout.Emitter<MemoryTransactionalSpoutMeta> {
-
- Integer _maxSpoutPending;
- Map<Integer, Integer> _emptyPartitions = new HashMap<Integer, Integer>();
-
- public Emitter(Map conf) {
- Object c = conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING);
- if (c == null)
- _maxSpoutPending = 1;
- else
- _maxSpoutPending = Utils.getInt(c);
- }
-
- @Override
- public MemoryTransactionalSpoutMeta emitPartitionBatch(
- TransactionAttempt tx, BatchOutputCollector collector,
- int partition, MemoryTransactionalSpoutMeta lastPartitionMeta) {
- if (!Boolean.FALSE.equals(getDisabledStatuses().get(partition))) {
- int index;
- if (lastPartitionMeta == null) {
- index = 0;
- } else {
- index = lastPartitionMeta.index + lastPartitionMeta.amt;
- }
- List<List<Object>> queue = getQueues().get(partition);
- int total = queue.size();
- int left = total - index;
- int toTake = Math.min(left, _takeAmt);
-
- MemoryTransactionalSpoutMeta ret = new MemoryTransactionalSpoutMeta(
- index, toTake);
- for (int i = ret.index; i < ret.index + ret.amt; i++) {
- List<Object> toEmit = new ArrayList<Object>(queue.get(i));
- toEmit.add(0, tx);
- collector.emit(toEmit);
- }
- if (toTake == 0) {
- // this is a pretty hacky way to determine when all the
- // partitions have been committed
- // wait until we've emitted max-spout-pending empty
- // partitions for the partition
- int curr = Utils.get(_emptyPartitions, partition, 0) + 1;
- _emptyPartitions.put(partition, curr);
- if (curr > _maxSpoutPending) {
- getFinishedStatuses().put(partition, true);
- }
- }
- return ret;
- } else {
- return null;
- }
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public int numPartitions() {
- return getQueues().size();
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- List<String> toDeclare = new ArrayList<String>(_outFields.toList());
- toDeclare.add(0, TX_FIELD);
- declarer.declare(new Fields(toDeclare));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Config conf = new Config();
- conf.registerSerialization(MemoryTransactionalSpoutMeta.class);
- return conf;
- }
-
- public void startup() {
- getFinishedStatuses().clear();
- }
-
- public void cleanup() {
- RegisteredGlobalState.clearState(_id);
- RegisteredGlobalState.clearState(_finishedPartitionsId);
- }
-
- private Map<Integer, List<List<Object>>> getQueues() {
- return (Map<Integer, List<List<Object>>>) RegisteredGlobalState
- .getState(_id);
- }
-
- private Map<Integer, Boolean> getFinishedStatuses() {
- return (Map<Integer, Boolean>) RegisteredGlobalState
- .getState(_finishedPartitionsId);
- }
-
- private Map<Integer, Boolean> getDisabledStatuses() {
- return (Map<Integer, Boolean>) RegisteredGlobalState
- .getState(_disabledId);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/PrepareBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/PrepareBatchBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/PrepareBatchBolt.java
deleted file mode 100644
index 207455b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/PrepareBatchBolt.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.List;
-
-public class PrepareBatchBolt extends BaseBasicBolt {
- Fields _outFields;
-
- public PrepareBatchBolt(Fields outFields) {
- _outFields = outFields;
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(_outFields);
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- long id = Utils.secureRandomLong();
- List<Object> toEmit = new ArrayList<Object>();
- toEmit.add(id);
- toEmit.addAll(input.getValues());
- collector.emit(toEmit);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/SpoutTracker.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/SpoutTracker.java b/jstorm-client/src/main/java/backtype/storm/testing/SpoutTracker.java
deleted file mode 100644
index a712ee8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/SpoutTracker.java
+++ /dev/null
@@ -1,88 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.spout.ISpoutOutputCollector;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.utils.RegisteredGlobalState;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class SpoutTracker extends BaseRichSpout {
- IRichSpout _delegate;
- SpoutTrackOutputCollector _tracker;
- String _trackId;
-
- private class SpoutTrackOutputCollector implements ISpoutOutputCollector {
- public int transferred = 0;
- public int emitted = 0;
- public SpoutOutputCollector _collector;
-
- public SpoutTrackOutputCollector(SpoutOutputCollector collector) {
- _collector = collector;
- }
-
- private void recordSpoutEmit() {
- Map stats = (Map) RegisteredGlobalState.getState(_trackId);
- ((AtomicInteger) stats.get("spout-emitted")).incrementAndGet();
-
- }
-
- public List<Integer> emit(String streamId, List<Object> tuple,
- Object messageId) {
- List<Integer> ret = _collector.emit(streamId, tuple, messageId);
- recordSpoutEmit();
- return ret;
- }
-
- public void emitDirect(int taskId, String streamId, List<Object> tuple,
- Object messageId) {
- _collector.emitDirect(taskId, streamId, tuple, messageId);
- recordSpoutEmit();
- }
-
- @Override
- public void reportError(Throwable error) {
- _collector.reportError(error);
- }
- }
-
- public SpoutTracker(IRichSpout delegate, String trackId) {
- _delegate = delegate;
- _trackId = trackId;
- }
-
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- _tracker = new SpoutTrackOutputCollector(collector);
- _delegate.open(conf, context, new SpoutOutputCollector(_tracker));
- }
-
- public void close() {
- _delegate.close();
- }
-
- public void nextTuple() {
- _delegate.nextTuple();
- }
-
- public void ack(Object msgId) {
- _delegate.ack(msgId);
- Map stats = (Map) RegisteredGlobalState.getState(_trackId);
- ((AtomicInteger) stats.get("processed")).incrementAndGet();
- }
-
- public void fail(Object msgId) {
- _delegate.fail(msgId);
- Map stats = (Map) RegisteredGlobalState.getState(_trackId);
- ((AtomicInteger) stats.get("processed")).incrementAndGet();
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _delegate.declareOutputFields(declarer);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestAggregatesCounter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestAggregatesCounter.java b/jstorm-client/src/main/java/backtype/storm/testing/TestAggregatesCounter.java
deleted file mode 100644
index 359fa00..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestAggregatesCounter.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-import backtype.storm.task.TopologyContext;
-import java.util.HashMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import static backtype.storm.utils.Utils.tuple;
-
-public class TestAggregatesCounter extends BaseRichBolt {
- public static Logger LOG = LoggerFactory.getLogger(TestWordCounter.class);
-
- Map<String, Integer> _counts;
- OutputCollector _collector;
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- _collector = collector;
- _counts = new HashMap<String, Integer>();
- }
-
- public void execute(Tuple input) {
- String word = (String) input.getValues().get(0);
- int count = (Integer) input.getValues().get(1);
- _counts.put(word, count);
- int globalCount = 0;
- for (String w : _counts.keySet()) {
- globalCount += _counts.get(w);
- }
- _collector.emit(tuple(globalCount));
- _collector.ack(input);
- }
-
- public void cleanup() {
-
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("agg-global"));
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestConfBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestConfBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/TestConfBolt.java
deleted file mode 100644
index c6fe3d6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestConfBolt.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.Map;
-
-public class TestConfBolt extends BaseBasicBolt {
- Map<String, Object> _componentConf;
- Map<String, Object> _conf;
-
- public TestConfBolt() {
- this(null);
- }
-
- public TestConfBolt(Map<String, Object> componentConf) {
- _componentConf = componentConf;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context) {
- _conf = conf;
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("conf", "value"));
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- String name = input.getString(0);
- collector.emit(new Values(name, _conf.get(name)));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _componentConf;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestGlobalCount.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestGlobalCount.java b/jstorm-client/src/main/java/backtype/storm/testing/TestGlobalCount.java
deleted file mode 100644
index 9f9e421..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestGlobalCount.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Values;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestGlobalCount extends BaseRichBolt {
- public static Logger LOG = LoggerFactory.getLogger(TestWordCounter.class);
-
- private int _count;
- OutputCollector _collector;
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- _collector = collector;
- _count = 0;
- }
-
- public void execute(Tuple input) {
- _count++;
- _collector.emit(input, new Values(_count));
- _collector.ack(input);
- }
-
- public void cleanup() {
-
- }
-
- public Fields getOutputFields() {
- return new Fields("global-count");
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("global-count"));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/test/java/com/alibaba/jstorm/util/queue/DisruptorTest.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/test/java/com/alibaba/jstorm/util/queue/DisruptorTest.java b/jstorm-client/src/test/java/com/alibaba/jstorm/util/queue/DisruptorTest.java
deleted file mode 100644
index c6f4479..0000000
--- a/jstorm-client/src/test/java/com/alibaba/jstorm/util/queue/DisruptorTest.java
+++ /dev/null
@@ -1,545 +0,0 @@
-package com.alibaba.jstorm.util.queue;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
-import junit.framework.Assert;
-
-import org.apache.log4j.Logger;
-import org.junit.Test;
-
-import backtype.storm.utils.DisruptorQueue;
-
-import com.lmax.disruptor.BlockingWaitStrategy;
-import com.lmax.disruptor.EventHandler;
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.dsl.ProducerType;
-
-public class DisruptorTest {
-
- static {
- DisruptorQueue.setUseSleep(true);
- DisruptorQueue.setLimited(true);
- }
-
- private int count = 100000000;
- private int buffer_size = 8 * 1024;
-
- private Logger logger = Logger.getLogger(DisruptorTest.class);
-
- @Test
- public void testMultipleConsume() {
- final DisruptorQueue disruptorQueue = createQueue("test",
- ProducerType.MULTI, 1024);
-
- // new Thread(new Runnable() {
- //
- // @Override
- // public void run() {
- // System.out.println("Begin to produce item");
- // JStormUtils.sleepMs(1000);
- //
- // for (int i = 0; i < 1000000; i++) {
- // disruptorQueue.publish(Integer.valueOf(i));
- // }
- //
- // System.out.println("Finish produce item");
- // }
- // }).start();
- //
- //
- // new Thread(new Runnable() {
- //
- // @Override
- // public void run() {
- // while(true) {
- // disruptorQueue.consumeBatchWhenAvailable(new EventHandler<Object>() {
- //
- // @Override
- // public void onEvent(Object event, long sequence,
- // boolean endOfBatch) throws Exception {
- //
- // System.out.println("Consumer 1:" + (Integer)event);
- // }
- //
- // });
- // }
- //
- // }
- // }).start();
- //
- // new Thread(new Runnable() {
- //
- // @Override
- // public void run() {
- // while(true) {
- // disruptorQueue.consumeBatchWhenAvailable(new EventHandler<Object>() {
- //
- // @Override
- // public void onEvent(Object event, long sequence,
- // boolean endOfBatch) throws Exception {
- //
- // System.out.println("Consumer 2:" + (Integer)event);
- // }
- //
- // });
- // }
- //
- // }
- // }).start();
- //
- // JStormUtils.sleepMs(100000);
- }
-
- private final static int TIMEOUT = 5; // MS
- private final static int PRODUCER_NUM = 4;
-
- @Test
- public void testLaterStartConsumer() throws InterruptedException {
- System.out
- .println("!!!!!!!!!!!!!!!Begin testLaterStartConsumer!!!!!!!!!!");
- final AtomicBoolean messageConsumed = new AtomicBoolean(false);
-
- // Set queue length to 1, so that the RingBuffer can be easily full
- // to trigger consumer blocking
- DisruptorQueue queue = createQueue("consumerHang", ProducerType.MULTI,
- 2);
- push(queue, 1);
- Runnable producer = new Producer(queue);
- Runnable consumer = new Consumer(queue, new EventHandler<Object>() {
- long count = 0;
-
- @Override
- public void onEvent(Object obj, long sequence, boolean endOfBatch)
- throws Exception {
-
- messageConsumed.set(true);
- System.out.println("Consume " + count++);
- }
- });
-
- run(producer, 0, 0, consumer, 50);
- Assert.assertTrue(
- "disruptor message is never consumed due to consumer thread hangs",
- messageConsumed.get());
-
- System.out
- .println("!!!!!!!!!!!!!!!!End testLaterStartConsumer!!!!!!!!!!");
- }
-
- @Test
- public void testBeforeStartConsumer() throws InterruptedException {
- System.out
- .println("!!!!!!!!!!!!Begin testBeforeStartConsumer!!!!!!!!!");
- final AtomicBoolean messageConsumed = new AtomicBoolean(false);
-
- // Set queue length to 1, so that the RingBuffer can be easily full
- // to trigger consumer blocking
- DisruptorQueue queue = createQueue("consumerHang", ProducerType.MULTI,
- 2);
- queue.consumerStarted();
- push(queue, 1);
- Runnable producer = new Producer(queue);
- Runnable consumer = new Consumer(queue, new EventHandler<Object>() {
- long count = 0;
-
- @Override
- public void onEvent(Object obj, long sequence, boolean endOfBatch)
- throws Exception {
-
- messageConsumed.set(true);
- System.out.println("Consume " + count++);
- }
- });
-
- run(producer, 0, 0, consumer, 50);
- Assert.assertTrue(
- "disruptor message is never consumed due to consumer thread hangs",
- messageConsumed.get());
-
- System.out
- .println("!!!!!!!!!!!!!End testBeforeStartConsumer!!!!!!!!!!");
- }
-
- @Test
- public void testSingleProducer() throws InterruptedException {
- System.out
- .println("!!!!!!!!!!!!!!Begin testSingleProducer!!!!!!!!!!!!!!");
- final AtomicBoolean messageConsumed = new AtomicBoolean(false);
-
- // Set queue length to 1, so that the RingBuffer can be easily full
- // to trigger consumer blocking
- DisruptorQueue queue = createQueue("consumerHang", ProducerType.SINGLE,
- 1);
- push(queue, 1);
- Runnable producer = new Producer(queue);
- Runnable consumer = new Consumer(queue, new EventHandler<Object>() {
- long count = 0;
-
- @Override
- public void onEvent(Object obj, long sequence, boolean endOfBatch)
- throws Exception {
-
- messageConsumed.set(true);
- System.out.println("Consume " + count++);
- }
- });
-
- run(producer, 0, 0, consumer, 50);
- Assert.assertTrue(
- "disruptor message is never consumed due to consumer thread hangs",
- messageConsumed.get());
-
- System.out
- .println("!!!!!!!!!!!!!!End testSingleProducer!!!!!!!!!!!!!!");
- }
-
- public static AtomicLong produceNum = new AtomicLong(0);
- public static AtomicLong consumerNum = new AtomicLong(0);
-
- public static EventHandlerTest handler = new EventHandlerTest();
-
- public static void resetNum() {
- produceNum.set(0);
- consumerNum.set(0);
- handler.reset();
-
- }
-
- @Test
- public void testMessageDisorder() throws InterruptedException {
-
- System.out
- .println("!!!!!!!!!!!!!!!!Begin testMessageDisorder!!!!!!!!!!");
- // Set queue length to bigger enough
- DisruptorQueue queue = createQueue("messageOrder", ProducerType.MULTI,
- 128);
-
- queue.publish("1");
-
- Runnable producer = new Producer(queue);
-
- final Object[] result = new Object[1];
- Runnable consumer = new Consumer(queue, new EventHandler<Object>() {
- private boolean head = true;
- private Map<String, Long> lastIdMap = new HashMap<String, Long>();
-
- @Override
- public void onEvent(Object obj, long sequence, boolean endOfBatch)
- throws Exception {
- consumerNum.incrementAndGet();
- if (head) {
- head = false;
- result[0] = obj;
- } else {
- String event = (String) obj;
- String[] item = event.split("@");
- Long current = Long.valueOf(item[1]);
- Long last = lastIdMap.get(item[0]);
- if (last != null) {
- if (current <= last) {
- String msg = "Consume disorder of " + item[0]
- + ", current" + current + ",last:" + last;
- System.err
- .println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
- System.err.println(msg);
- System.err
- .println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
- Assert.fail(msg);
- }
- }
-
- lastIdMap.put(item[0], current);
-
- }
- }
- });
-
- run(producer, PRODUCER_NUM, 1000, consumer, 30000);
- Assert.assertEquals(
- "We expect to receive first published message first, but received "
- + result[0], "1", result[0]);
- produceNum.incrementAndGet();
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out.println("!!!!!!!!!!!!!!End testMessageDisorder!!!!!!!!!!!!");
- }
-
- @Test
- public void testPull() {
- // @@@ TODO
- }
-
- @Test
- public void testTake() {
- // @@@ TODO
- }
-
- public void push(DisruptorQueue queue, int num) {
- for (int i = 0; i < num; i++) {
- String msg = String.valueOf(Thread.currentThread().getId()) + "@"
- + i;
- try {
- queue.publish(msg, false);
- } catch (InsufficientCapacityException e) {
- e.printStackTrace();
- }
- produceNum.incrementAndGet();
- System.out.println(Thread.currentThread().getId()
- + " Publish one :" + i);
- }
- }
-
- @Test
- public void testConsumeBatchWhenAvailable() {
- System.out
- .println("!!!!!!!!!!!!!!!Begin testConsumeBatchWhenAvailable!!!!!!!!!!!!");
-
- resetNum();
-
- // Set queue length to bigger enough
- DisruptorQueue queue = createQueue("messageOrder", ProducerType.MULTI,
- 128);
-
- push(queue, 128);
-
- queue.consumeBatchWhenAvailable(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out
- .println("!!!!!! finish testConsumeBatchWhenAvailable test 1");
- resetNum();
-
- queue.consumerStarted();
-
- push(queue, 128);
-
- queue.consumeBatchWhenAvailable(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out
- .println("!!!!!! finish testConsumeBatchWhenAvailable test 2");
-
- System.out
- .println("!!!!!!!!!!!!!!!Finsh testConsumeBatchWhenAvailable for MULTI!!!!!!!!!!!!");
-
- resetNum();
- // Set queue length to bigger enough
- DisruptorQueue queue2 = createQueue("messageOrder",
- ProducerType.SINGLE, 128);
-
- push(queue2, 128);
-
- queue2.consumeBatchWhenAvailable(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out
- .println("!!!!!! finish testConsumeBatchWhenAvailable test 3");
- resetNum();
-
- queue2.consumerStarted();
-
- push(queue2, 128);
-
- queue2.consumeBatchWhenAvailable(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out
- .println("!!!!!! finish testConsumeBatchWhenAvailable test 4");
-
- System.out
- .println("!!!!!!!!!!!!!!!Finsh testConsumeBatchWhenAvailable for single !!!!!!!!!!!!");
- System.out
- .println("!!!!!!!!!!!!!End testConsumeBatchWhenAvailable!!!!!!!!!!!");
- }
-
- @Test
- public void testTryConsume() {
- System.out.println("!!!!!!!!!!!!Begin testTryConsume!!!!!!!!!!!!!!!!");
-
- resetNum();
- // Set queue length to bigger enough
- DisruptorQueue queue = createQueue("messageOrder", ProducerType.MULTI,
- 128);
-
- push(queue, 128);
-
- queue.consumeBatch(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out.println("!!!!!! finish testTryConsume test 1");
- resetNum();
-
- queue.consumerStarted();
-
- push(queue, 128);
-
- queue.consumeBatch(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out.println("!!!!!! finish testTryConsume test 2");
-
- resetNum();
- // Set queue length to bigger enough
- DisruptorQueue queue2 = createQueue("messageOrder",
- ProducerType.SINGLE, 128);
-
- push(queue2, 128);
-
- queue2.consumeBatch(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out.println("!!!!!! finish testTryConsume test 3");
- resetNum();
-
- queue2.consumerStarted();
-
- push(queue2, 128);
-
- queue2.consumeBatch(handler);
-
- Assert.assertEquals("produce: " + produceNum.get() + ", consume:"
- + consumerNum.get(), produceNum.get(), consumerNum.get());
- System.out.println("!!!!!! finish testTryConsume test 4");
-
- System.out.println("!!!!!!!!!!!!!!!!!End testTryConsume!!!!!!!!!!!!!!");
- }
-
- private void run(Runnable producer, int producerNum, long produceMs,
- Runnable consumer, long waitMs) {
- try {
-
- resetNum();
-
- Thread[] producerThreads = new Thread[producerNum];
- for (int i = 0; i < producerNum; i++) {
- producerThreads[i] = new Thread(producer);
- producerThreads[i].start();
- }
-
- Thread consumerThread = new Thread(consumer);
- consumerThread.start();
- System.out.println("Please wait seconds" + produceMs / 1000);
-
- Thread.sleep(produceMs);
-
- for (int i = 0; i < producerNum; i++) {
- producerThreads[i].interrupt();
- producerThreads[i].stop();
- producerThreads[i].join(TIMEOUT);
- }
-
- Thread.sleep(waitMs);
- System.out.println("Please wait seconds" + waitMs / 1000);
-
- consumerThread.interrupt();
- consumerThread.stop();
- consumerThread.join(TIMEOUT);
- } catch (Throwable e) {
- e.printStackTrace();
- }
- }
-
- private class Producer implements Runnable {
- private String msg;
- private DisruptorQueue queue;
-
- Producer(DisruptorQueue queue) {
- this.queue = queue;
- }
-
- @Override
- public void run() {
- long count = 0;
- try {
- while (true) {
-
- String msg = String.valueOf(Thread.currentThread().getId())
- + "@" + count;
- queue.publish(msg, false);
- produceNum.incrementAndGet();
- System.out.println(msg);
- count++;
- }
- } catch (InsufficientCapacityException e) {
- System.out.println(Thread.currentThread().getId()
- + " quit, insufficientCapacityException " + count);
- return;
- }catch (Exception e) {
- System.out.println(Thread.currentThread().getId()
- + " quit, Exception " + count);
- return;
- }
- }
- }
-
- private class Consumer implements Runnable {
- private EventHandler handler;
- private DisruptorQueue queue;
-
- Consumer(DisruptorQueue queue, EventHandler handler) {
- this.handler = handler;
- this.queue = queue;
- }
-
- @Override
- public void run() {
- queue.consumerStarted();
- try {
- while (true) {
- queue.consumeBatchWhenAvailable(handler);
- }
- } catch (Exception e) {
- // break
- }
- }
- }
-
- static class EventHandlerTest implements EventHandler<Object> {
- private Map<String, Long> lastIdMap = new HashMap<String, Long>();
-
- public void reset() {
- lastIdMap.clear();
- }
-
- @Override
- public void onEvent(Object obj, long sequence, boolean endOfBatch)
- throws Exception {
-
- String event = (String) obj;
- String[] item = event.split("@");
- Long current = Long.valueOf(item[1]);
- Long last = lastIdMap.get(item[0]);
- if (last != null) {
- if (current <= last) {
- String msg = "Consume disorder of " + item[0] + ", current"
- + current + ",last:" + last;
- System.err.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
- System.err.println(msg + "," + event);
- System.err.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
- Assert.fail(msg);
- }
- }
-
- lastIdMap.put(item[0], current);
- consumerNum.incrementAndGet();
- }
- };
-
- private static DisruptorQueue createQueue(String name, ProducerType type,
- int queueSize) {
-
-
- return DisruptorQueue.mkInstance(name, type, queueSize,
- new BlockingWaitStrategy());
- }
-}
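
Distilled from the test deleted above, a minimal sketch of the DisruptorQueue publish/consume cycle it exercises. The queue name, size, and message format are arbitrary choices for illustration.

    import backtype.storm.utils.DisruptorQueue;
    import com.lmax.disruptor.BlockingWaitStrategy;
    import com.lmax.disruptor.EventHandler;
    import com.lmax.disruptor.dsl.ProducerType;

    public class DisruptorQueueSketch {
        public static void main(String[] args) {
            DisruptorQueue queue = DisruptorQueue.mkInstance(
                    "sketch", ProducerType.SINGLE, 16, new BlockingWaitStrategy());
            queue.consumerStarted();

            for (int i = 0; i < 8; i++) {
                queue.publish("msg-" + i);   // enqueue one message
            }

            // Drain whatever is currently available in a single batch.
            queue.consumeBatch(new EventHandler<Object>() {
                @Override
                public void onEvent(Object obj, long sequence, boolean endOfBatch) {
                    System.out.println("consumed " + obj);
                }
            });
        }
    }
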
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.classpath
----------------------------------------------------------------------
diff --git a/jstorm-core/.classpath b/jstorm-core/.classpath
new file mode 100755
index 0000000..f0a60b6
--- /dev/null
+++ b/jstorm-core/.classpath
@@ -0,0 +1,37 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" output="target/classes" path="src/main/java">
+ <attributes>
+ <attribute name="optional" value="true"/>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
+ <attributes>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="src" output="target/test-classes" path="src/test/java">
+ <attributes>
+ <attribute name="optional" value="true"/>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
+ <attributes>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
+ <attributes>
+ <attribute name="maven.pomderived" value="true"/>
+ <attribute name="org.eclipse.jst.component.nondependency" value=""/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
+ <attributes>
+ <attribute name="owner.project.facets" value="java"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="output" path="target/classes"/>
+</classpath>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.gitignore
----------------------------------------------------------------------
diff --git a/jstorm-core/.gitignore b/jstorm-core/.gitignore
new file mode 100755
index 0000000..b83d222
--- /dev/null
+++ b/jstorm-core/.gitignore
@@ -0,0 +1 @@
+/target/
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.project
----------------------------------------------------------------------
diff --git a/jstorm-core/.project b/jstorm-core/.project
new file mode 100755
index 0000000..617507f
--- /dev/null
+++ b/jstorm-core/.project
@@ -0,0 +1,36 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>jstorm-core</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.wst.common.project.facet.core.builder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.wst.validation.validationbuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.m2e.core.maven2Builder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
+ <nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ <nature>org.eclipse.m2e.core.maven2Nature</nature>
+ <nature>org.eclipse.wst.common.project.facet.core.nature</nature>
+ </natures>
+</projectDescription>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.core.resources.prefs b/jstorm-core/.settings/org.eclipse.core.resources.prefs
new file mode 100755
index 0000000..04cfa2c
--- /dev/null
+++ b/jstorm-core/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,6 @@
+eclipse.preferences.version=1
+encoding//src/main/java=UTF-8
+encoding//src/main/resources=UTF-8
+encoding//src/test/java=UTF-8
+encoding//src/test/resources=UTF-8
+encoding/<project>=UTF-8
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.jdt.core.prefs b/jstorm-core/.settings/org.eclipse.jdt.core.prefs
new file mode 100755
index 0000000..c788ee3
--- /dev/null
+++ b/jstorm-core/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,8 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
+org.eclipse.jdt.core.compiler.compliance=1.7
+org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
+org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.7
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.settings/org.eclipse.m2e.core.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.m2e.core.prefs b/jstorm-core/.settings/org.eclipse.m2e.core.prefs
new file mode 100755
index 0000000..14b697b
--- /dev/null
+++ b/jstorm-core/.settings/org.eclipse.m2e.core.prefs
@@ -0,0 +1,4 @@
+activeProfiles=
+eclipse.preferences.version=1
+resolveWorkspaceProjects=true
+version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.settings/org.eclipse.wst.common.component
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.wst.common.component b/jstorm-core/.settings/org.eclipse.wst.common.component
new file mode 100755
index 0000000..aaa3793
--- /dev/null
+++ b/jstorm-core/.settings/org.eclipse.wst.common.component
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
+ <wb-module deploy-name="jstorm-core">
+ <wb-resource deploy-path="/" source-path="/src/main/java"/>
+ <wb-resource deploy-path="/" source-path="/src/main/resources"/>
+ </wb-module>
+</project-modules>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml b/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml
new file mode 100755
index 0000000..4f92af5
--- /dev/null
+++ b/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml
@@ -0,0 +1,5 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<faceted-project>
+ <installed facet="jst.utility" version="1.0"/>
+ <installed facet="java" version="1.7"/>
+</faceted-project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/.settings/org.eclipse.wst.validation.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.wst.validation.prefs b/jstorm-core/.settings/org.eclipse.wst.validation.prefs
new file mode 100644
index 0000000..04cad8c
--- /dev/null
+++ b/jstorm-core/.settings/org.eclipse.wst.validation.prefs
@@ -0,0 +1,2 @@
+disabled=06target
+eclipse.preferences.version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-core/pom.xml b/jstorm-core/pom.xml
new file mode 100755
index 0000000..2eab1b8
--- /dev/null
+++ b/jstorm-core/pom.xml
@@ -0,0 +1,254 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+
+ <parent>
+ <groupId>com.alibaba.jstorm</groupId>
+ <artifactId>jstorm-all</artifactId>
+ <version>2.0.4-SNAPSHOT</version>
+ <relativePath>..</relativePath>
+ </parent>
+ <!-- <parent>
+ <groupId>com.taobao</groupId>
+ <artifactId>parent</artifactId>
+ <version>1.0.2</version>
+ </parent> -->
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>com.alibaba.jstorm</groupId>
+ <artifactId>jstorm-core</artifactId>
+
+ <packaging>jar</packaging>
+ <name>${project.artifactId}-${project.version}</name>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-surefire-plugin</artifactId>
+ <configuration>
+ <forkMode>pertest</forkMode>
+ <argLine>-Xms1024m -Xmx3072m</argLine>
+ </configuration>
+ </plugin>
+ <plugin>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.3.2</version>
+ <configuration>
+ <source>1.7</source>
+ <target>1.7</target>
+ </configuration>
+ </plugin>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-source-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>attach-sources</id>
+ <goals>
+ <goal>jar</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+
+ <properties>
+ <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+ <powermock.version>1.4.11</powermock.version>
+ <metrics.version>3.1.2</metrics.version>
+ </properties>
+ <dependencies>
+ <dependency>
+ <groupId>org.clojure</groupId>
+ <artifactId>clojure</artifactId>
+ <version>1.6.0</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-io</groupId>
+ <artifactId>commons-io</artifactId>
+ <version>2.4</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.commons</groupId>
+ <artifactId>commons-exec</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-lang</groupId>
+ <artifactId>commons-lang</artifactId>
+ <version>2.5</version>
+ </dependency>
+ <dependency>
+ <groupId>commons-cli</groupId>
+ <artifactId>commons-cli</artifactId>
+ <version>1.2</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.httpcomponents</groupId>
+ <artifactId>httpclient</artifactId>
+ <version>4.3.3</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.thrift</groupId>
+ <artifactId>libthrift</artifactId>
+ <version>0.9.2</version>
+ <scope>compile</scope>
+ <exclusions>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-api</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>clj-time</groupId>
+ <artifactId>clj-time</artifactId>
+ <version>0.8.0</version>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.curator</groupId>
+ <artifactId>curator-framework</artifactId>
+ <version>2.5.0</version>
+ <exclusions>
+ <exclusion>
+ <groupId>log4j</groupId>
+ <artifactId>log4j</artifactId>
+ </exclusion>
+ <exclusion>
+ <groupId>org.slf4j</groupId>
+ <artifactId>slf4j-log4j12</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <dependency>
+ <groupId>com.esotericsoftware.kryo</groupId>
+ <artifactId>kryo</artifactId>
+ <version>2.21</version>
+ <exclusions>
+ <exclusion>
+ <groupId>com.esotericsoftware.minlog</groupId>
+ <artifactId>minlog</artifactId>
+ </exclusion>
+ </exclusions>
+ </dependency>
+ <!-- keep compatible with storm, some old project use gson -->
+ <dependency>
+ <groupId>com.googlecode.json-simple</groupId>
+ <artifactId>json-simple</artifactId>
+ <version>1.1</version>
+ </dependency>
+ <dependency>
+ <groupId>com.twitter</groupId>
+ <artifactId>carbonite</artifactId>
+ <version>1.4.0</version>
+ </dependency>
+
+ <dependency>
+ <groupId>org.yaml</groupId>
+ <artifactId>snakeyaml</artifactId>
+ <version>1.11</version>
+ </dependency>
+ <dependency>
+ <groupId>com.lmax</groupId>
+ <artifactId>disruptor</artifactId>
+ <version>3.2.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.netty</groupId>
+ <artifactId>netty</artifactId>
+ <version>3.9.0.Final</version>
+ </dependency>
+ <dependency>
+ <groupId>org.jgrapht</groupId>
+ <artifactId>jgrapht-core</artifactId>
+ <version>0.9.0</version>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.10</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.powermock</groupId>
+ <artifactId>powermock-module-junit4</artifactId>
+ <version>${powermock.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>ch.qos.logback</groupId>
+ <artifactId>logback-classic</artifactId>
+ <version>1.0.13</version>
+ </dependency>
+ <dependency>
+ <groupId>org.slf4j</groupId>
+ <artifactId>log4j-over-slf4j</artifactId>
+ <version>1.6.6</version>
+ </dependency>
+ <!--
+ <dependency>
+ <groupId>org.codehaus.plexus</groupId>
+ <artifactId>plexus-compiler-javac</artifactId>
+ <version>1.8.1</version>
+ </dependency>
+ -->
+ <dependency>
+ <groupId>com.google.code.gson</groupId>
+ <artifactId>gson</artifactId>
+ <version>2.3.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.powermock</groupId>
+ <artifactId>powermock-module-junit4</artifactId>
+ <version>${powermock.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>io.dropwizard.metrics</groupId>
+ <artifactId>metrics-core</artifactId>
+ <version>${metrics.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>io.dropwizard.metrics</groupId>
+ <artifactId>metrics-healthchecks</artifactId>
+ <version>${metrics.version}</version>
+ </dependency>
+ <dependency>
+ <groupId>io.dropwizard.metrics</groupId>
+ <artifactId>metrics-jvm</artifactId>
+ <version>${metrics.version}</version>
+ </dependency>
+
+ <!-- <dependency> <groupId>ch.qos.logback</groupId> <artifactId>logback-classic</artifactId>
+ <version>1.0.13</version> </dependency> <dependency> <groupId>org.slf4j</groupId>
+ <artifactId>log4j-over-slf4j</artifactId> <version>1.7.5</version> </dependency> -->
+ <dependency>
+ <groupId>javax.servlet</groupId>
+ <artifactId>servlet-api</artifactId>
+ <version>2.5</version>
+ </dependency>
+ <dependency>
+ <groupId>org.rocksdb</groupId>
+ <artifactId>rocksdbjni</artifactId>
+ <version>3.10.1</version>
+ </dependency>
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <version>1.7.1</version>
+ <scope>test</scope>
+ </dependency>
+
+ <dependency>
+ <groupId>org.mockito</groupId>
+ <artifactId>mockito-all</artifactId>
+ <version>1.10.19</version>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
[57/60] [abbrv] storm git commit: remove jstorm-utility directory
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/pom.xml b/jstorm-utility/ons/pom.xml
deleted file mode 100755
index fc8f06a..0000000
--- a/jstorm-utility/ons/pom.xml
+++ /dev/null
@@ -1,101 +0,0 @@
-<?xml version="1.0"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>com.taobao</groupId>
- <artifactId>parent</artifactId>
- <version>1.0.2</version>
- </parent>
-
- <!--
- <parent>
- <groupId>com.alibaba.aloha</groupId>
- <artifactId>aloha-utility</artifactId>
- <version>0.2.0-SNAPSHOT</version>
- </parent> -->
- <groupId>com.alibaba.aloha</groupId>
- <artifactId>ons</artifactId>
- <version>0.2.0</version>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
- <properties>
- <jstorm.version>0.9.7</jstorm.version>
- </properties>
-
- <dependencies>
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-server</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- </dependency>
-
- <dependency>
- <groupId>com.aliyun.openservices</groupId>
- <artifactId>ons-client</artifactId>
- <version>1.1.5</version>
- </dependency>
- <!--
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-common</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-client</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-remoting</artifactId>
- <version>3.0.1</version>
- </dependency>
- -->
-
- </dependencies>
-
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/LoadConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/LoadConfig.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/LoadConfig.java
deleted file mode 100755
index 6f062eb..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/LoadConfig.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package com.alibaba.jstorm;
-
-import org.yaml.snakeyaml.Yaml;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-public class LoadConfig {
- public static final String TOPOLOGY_TYPE = "topology.type";
-
- private static Map LoadProperty(String prop) {
- Map ret = null;
- Properties properties = new Properties();
-
- try {
- InputStream stream = new FileInputStream(prop);
- properties.load(stream);
- ret = new HashMap<Object, Object>();
- ret.putAll(properties);
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + prop);
- } catch (Exception e1) {
- e1.printStackTrace();
- }
-
- return ret;
- }
-
- private static Map LoadYaml(String confPath) {
- Map ret = null;
- Yaml yaml = new Yaml();
-
- try {
- InputStream stream = new FileInputStream(confPath);
-
- ret = (Map) yaml.load(stream);
- if (ret == null || ret.isEmpty() == true) {
- throw new RuntimeException("Failed to read config file");
- }
-
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + confPath);
- throw new RuntimeException("No config file");
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException("Failed to read config file");
- }
-
- return ret;
- }
-
- public static Map LoadConf(String arg) {
- Map ret = null;
-
- if (arg.endsWith("yaml")) {
- ret = LoadYaml(arg);
- } else {
- ret = LoadProperty(arg);
- }
-
- return ret;
- }
-}
\ No newline at end of file
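For readers skimming the removed loader above: its yaml branch simply hands the stream to SnakeYAML (the same snakeyaml dependency declared in the jstorm-core pom earlier in this thread) and casts the result to a raw Map. A minimal, self-contained sketch of that mechanic follows; the class name and the inline YAML string are illustrative only and are not part of the removed module.

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.Map;

import org.yaml.snakeyaml.Yaml;

public class YamlConfigDemo {
    public static void main(String[] args) {
        // Inline YAML instead of a file path, just to keep the sketch self-contained.
        String yaml = "topology.name: test_meta_spout\ntopology.workers: 5\n";
        InputStream in = new ByteArrayInputStream(yaml.getBytes(StandardCharsets.UTF_8));

        // Same raw-Map cast the removed LoadYaml() relied on.
        Map conf = (Map) new Yaml().load(in);
        System.out.println(conf.get("topology.name") + " / " + conf.get("topology.workers"));
    }
}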
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/TestTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/TestTopology.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/TestTopology.java
deleted file mode 100755
index 9ad17de..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/TestTopology.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package com.alibaba.jstorm;
-
-import backtype.storm.Config;
-import backtype.storm.LocalCluster;
-import backtype.storm.StormSubmitter;
-import backtype.storm.topology.TopologyBuilder;
-
-import com.alibaba.jstorm.ons.consumer.ConsumerSpout;
-import com.alibaba.jstorm.ons.producer.ProducerBolt;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class TestTopology {
-
- private static Map conf = new HashMap<Object, Object>();
-
- public static void main(String[] args) throws Exception {
- if (args.length == 0) {
- System.err.println("Please input configuration file");
- System.exit(-1);
- }
-
- conf = LoadConfig.LoadConf(args[0]);
-
- TopologyBuilder builder = setupBuilder();
-
- submitTopology(builder);
-
- }
-
- private static TopologyBuilder setupBuilder() throws Exception {
- TopologyBuilder builder = new TopologyBuilder();
-
- int writerParallel = JStormUtils.parseInt(conf.get("topology.producer.parallel"), 1);
-
- int spoutParallel = JStormUtils.parseInt(conf.get("topology.consumer.parallel"), 1);
-
- builder.setSpout("OnsConsumer", new ConsumerSpout(), spoutParallel);
-
- builder.setBolt("OnsProducer", new ProducerBolt(), writerParallel).localFirstGrouping("OnsConsumer");
-
- return builder;
- }
-
- private static void submitTopology(TopologyBuilder builder) {
- try {
- if (local_mode(conf)) {
-
- LocalCluster cluster = new LocalCluster();
-
- cluster.submitTopology(String.valueOf(conf.get("topology.name")), conf, builder.createTopology());
-
- Thread.sleep(200000);
-
- cluster.shutdown();
- } else {
- StormSubmitter.submitTopology(String.valueOf(conf.get("topology.name")), conf,
- builder.createTopology());
- }
-
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- public static boolean local_mode(Map conf) {
- String mode = (String) conf.get(Config.STORM_CLUSTER_MODE);
- if (mode != null) {
- if (mode.equals("local")) {
- return true;
- }
- }
-
- return false;
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsConfig.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsConfig.java
deleted file mode 100644
index c8a9b63..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsConfig.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package com.alibaba.jstorm.ons;
-
-import java.io.Serializable;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-import com.aliyun.openservices.ons.api.PropertyKeyConst;
-
-public class OnsConfig implements Serializable{
-
- private static final long serialVersionUID = -3911741873533333336L;
-
- private final String topic;
- private final String subExpress;
- private final String accessKey;
- private final String secretKey;
-
- public OnsConfig(Map conf) {
- topic = (String)conf.get("Topic");
- if (conf.get("SubExpress") != null) {
- subExpress = (String)conf.get("SubExpress");
- }else {
- subExpress = "*";
- }
- accessKey = (String)conf.get(PropertyKeyConst.AccessKey);
- secretKey = (String)conf.get(PropertyKeyConst.SecretKey);
-
- checkValid();
-
- }
-
- public void checkValid() {
- if (StringUtils.isBlank(topic) == true) {
- throw new RuntimeException("Topic hasn't been set");
- }else if (StringUtils.isBlank(subExpress)) {
- throw new RuntimeException("SubExpress hasn't been set");
- }else if (StringUtils.isBlank(accessKey)) {
- throw new RuntimeException(PropertyKeyConst.AccessKey + " hasn't been set");
- }else if (StringUtils.isBlank(secretKey)) {
- throw new RuntimeException(PropertyKeyConst.SecretKey + " hasn't been set");
- }
-
- }
-
- public String getTopic() {
- return topic;
- }
-
- public String getSubExpress() {
- return subExpress;
- }
-
- public String getAccessKey() {
- return accessKey;
- }
-
- public String getSecretKey() {
- return secretKey;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsTuple.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsTuple.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsTuple.java
deleted file mode 100644
index c3a3e5d..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/OnsTuple.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package com.alibaba.jstorm.ons;
-
-import com.aliyun.openservices.ons.api.Message;
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-import java.io.Serializable;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class OnsTuple implements Serializable {
-
- /** */
- private static final long serialVersionUID = 2277714452693486955L;
-
- protected final Message message;
-
- protected final AtomicInteger failureTimes;
- protected final long createMs;
- protected long emitMs;
-
- protected transient CountDownLatch latch;
- protected transient boolean isSuccess;
-
- public OnsTuple(Message message) {
- this.message = message;
-
- this.failureTimes = new AtomicInteger(0);
- this.createMs = System.currentTimeMillis();
-
- this.latch = new CountDownLatch(1);
- this.isSuccess = false;
- }
-
- public AtomicInteger getFailureTimes() {
- return failureTimes;
- }
-
- public long getCreateMs() {
- return createMs;
- }
-
- public long getEmitMs() {
- return emitMs;
- }
-
- public void updateEmitMs() {
- this.emitMs = System.currentTimeMillis();
- }
-
- public Message getMessage() {
- return message;
- }
-
- public boolean waitFinish() throws InterruptedException {
- return latch.await(4, TimeUnit.HOURS);
- }
-
- public void done() {
- isSuccess = true;
- latch.countDown();
- }
-
- public void fail() {
- isSuccess = false;
- latch.countDown();
- }
-
- public boolean isSuccess() {
- return isSuccess;
- }
-
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-}
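The OnsTuple being removed above couples the ONS listener thread to the topology's ack/fail path with a CountDownLatch: the listener blocks in waitFinish() until the spout's ack or fail callback calls done() or fail(), and the boolean outcome then decides commit versus redelivery in the consumer spout that follows. A minimal, self-contained illustration of that handshake is sketched below; the class and method names are hypothetical stand-ins, not the original API.

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class LatchHandshakeDemo {

    static final class PendingMessage {
        private final CountDownLatch latch = new CountDownLatch(1);
        private volatile boolean success;

        // Called from the spout's ack()/fail() path.
        void done() { success = true;  latch.countDown(); }
        void fail() { success = false; latch.countDown(); }

        // Called from the message-listener thread; blocks until ack/fail arrives or the timeout expires.
        boolean waitFinish(long timeout, TimeUnit unit) throws InterruptedException {
            return latch.await(timeout, unit);
        }

        boolean isSuccess() { return success; }
    }

    public static void main(String[] args) throws Exception {
        final PendingMessage msg = new PendingMessage();

        // Simulates the topology acking the tuple from another thread.
        new Thread(new Runnable() {
            public void run() {
                try { Thread.sleep(100); } catch (InterruptedException ignored) { }
                msg.done();
            }
        }).start();

        // Simulates the listener deciding commit vs. redeliver, as consume() does in the spout.
        msg.waitFinish(5, TimeUnit.SECONDS);
        System.out.println(msg.isSuccess() ? "CommitMessage" : "ReconsumeLater");
    }
}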
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerConfig.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerConfig.java
deleted file mode 100644
index 00fac77..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerConfig.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package com.alibaba.jstorm.ons.consumer;
-
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-
-import com.alibaba.jstorm.ons.OnsConfig;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.aliyun.openservices.ons.api.PropertyKeyConst;
-
-public class ConsumerConfig extends OnsConfig{
-
- private static final long serialVersionUID = 4292162795544528064L;
- private final String consumerId;
- private final int consumerThreadNum;
-
-
- private final String nameServer;
-
-
- public ConsumerConfig(Map conf) {
- super(conf);
-
- consumerId = (String)conf.get(PropertyKeyConst.ConsumerId);
- if (StringUtils.isBlank(consumerId)) {
- throw new RuntimeException(PropertyKeyConst.ConsumerId + " hasn't been set");
- }
- consumerThreadNum = JStormUtils.parseInt(
- conf.get(PropertyKeyConst.ConsumeThreadNums), 4);
-
- nameServer = (String)conf.get(PropertyKeyConst.NAMESRV_ADDR);
- if (nameServer != null) {
- String namekey = "rocketmq.namesrv.domain";
-
- String value = System.getProperty(namekey);
- if (value == null) {
-
- System.setProperty(namekey, nameServer);
- } else if (value.equals(nameServer) == false) {
- throw new RuntimeException("Different nameserver address in the same worker " + value + ":"
- + nameServer);
-
- }
- }
-
- }
-
-
- public String getConsumerId() {
- return consumerId;
- }
-
-
- public int getConsumerThreadNum() {
- return consumerThreadNum;
- }
-
-
- public String getNameServer() {
- return nameServer;
- }
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerFactory.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerFactory.java
deleted file mode 100644
index b16a5c6..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerFactory.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.alibaba.jstorm.ons.consumer;
-
-import com.aliyun.openservices.ons.api.Consumer;
-import com.aliyun.openservices.ons.api.MessageListener;
-import com.aliyun.openservices.ons.api.ONSFactory;
-import com.aliyun.openservices.ons.api.PropertyKeyConst;
-import org.apache.log4j.Logger;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-public class ConsumerFactory {
-
- private static final Logger LOG = Logger.getLogger(ConsumerFactory.class);
-
- public static Map<String, Consumer> consumers = new HashMap<String, Consumer>();
-
- public static synchronized Consumer mkInstance(ConsumerConfig consumerConfig, MessageListener listener) throws Exception {
-
-
- String consumerId = consumerConfig.getConsumerId();
- Consumer consumer = consumers.get(consumerId);
- if (consumer != null) {
-
- LOG.info("Consumer of " + consumerId + " has been created, don't recreate it ");
-
-         // Note: return null here to signal that the consumer is duplicated
- return null;
- }
-
- Properties properties = new Properties();
- properties.put(PropertyKeyConst.AccessKey, consumerConfig.getAccessKey());
- properties.put(PropertyKeyConst.SecretKey, consumerConfig.getSecretKey());
- properties.put(PropertyKeyConst.ConsumerId, consumerId);
- properties.put(PropertyKeyConst.ConsumeThreadNums, consumerConfig.getConsumerThreadNum());
- consumer = ONSFactory.createConsumer(properties);
-
- consumer.subscribe(consumerConfig.getTopic(), consumerConfig.getSubExpress(), listener);
- consumer.start();
-
- consumers.put(consumerId, consumer);
- LOG.info("Successfully create " + consumerId + " consumer");
-
- return consumer;
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerSpout.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerSpout.java
deleted file mode 100644
index b32186d..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/consumer/ConsumerSpout.java
+++ /dev/null
@@ -1,268 +0,0 @@
-package com.alibaba.jstorm.ons.consumer;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import com.alibaba.jstorm.client.metric.MetricClient;
-import com.alibaba.jstorm.client.spout.IAckValueSpout;
-import com.alibaba.jstorm.client.spout.IFailValueSpout;
-import com.alibaba.jstorm.metric.JStormHistogram;
-import com.alibaba.jstorm.ons.OnsTuple;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.aliyun.openservices.ons.api.*;
-import org.apache.log4j.Logger;
-
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.atomic.AtomicInteger;
-
-
-public class ConsumerSpout implements IRichSpout, IAckValueSpout, IFailValueSpout, MessageListener {
- /** */
- private static final long serialVersionUID = 8476906628618859716L;
- private static final Logger LOG = Logger.getLogger(ConsumerSpout.class);
-
- public static final String ONS_SPOUT_FLOW_CONTROL = "OnsSpoutFlowControl";
- public static final String ONS_SPOUT_AUTO_ACK = "OnsSpoutAutoAck";
- public static final String ONS_MSG_MAX_FAIL_TIMES = "OnsMsgMaxFailTimes";
-
- protected SpoutOutputCollector collector;
- protected transient Consumer consumer;
- protected transient ConsumerConfig consumerConfig;
-
- protected Map conf;
- protected String id;
- protected boolean flowControl;
- protected boolean autoAck;
- protected long maxFailTimes;
- protected boolean active = true;
-
- protected transient LinkedBlockingDeque<OnsTuple> sendingQueue;
-
- protected transient MetricClient metricClient;
- protected transient JStormHistogram waithHistogram;
- protected transient JStormHistogram processHistogram;
-
-
- public ConsumerSpout() {
-
- }
-
-
- public void initMetricClient(TopologyContext context) {
- metricClient = new MetricClient(context);
- waithHistogram = metricClient.registerHistogram("OnsTupleWait", null);
- processHistogram = metricClient.registerHistogram("OnsTupleProcess", null);
- }
-
-
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
- this.conf = conf;
- this.collector = collector;
- this.id = context.getThisComponentId() + ":" + context.getThisTaskId();
- this.sendingQueue = new LinkedBlockingDeque<OnsTuple>();
-
- this.flowControl = JStormUtils.parseBoolean(conf.get(ONS_SPOUT_FLOW_CONTROL), true);
- this.autoAck = JStormUtils.parseBoolean(conf.get(ONS_SPOUT_AUTO_ACK), false);
- this.maxFailTimes = JStormUtils.parseLong(conf.get(ONS_MSG_MAX_FAIL_TIMES), 5);
-
- StringBuilder sb = new StringBuilder();
- sb.append("Begin to init MetaSpout:").append(id);
- sb.append(", flowControl:").append(flowControl);
- sb.append(", autoAck:").append(autoAck);
- LOG.info(sb.toString());
-
- initMetricClient(context);
-
- try {
- consumerConfig = new ConsumerConfig(conf);
- consumer = ConsumerFactory.mkInstance(consumerConfig, this);
- }
- catch (Exception e) {
- LOG.error("Failed to create Meta Consumer ", e);
-             throw new RuntimeException("Failed to create MetaConsumer " + id, e);
- }
-
- if (consumer == null) {
-             LOG.warn(id + ": a consumer already exists in the current worker, no need to fetch data ");
-
- new Thread(new Runnable() {
-
- @Override
- public void run() {
- while (true) {
- try {
- Thread.sleep(10000);
- }
- catch (InterruptedException e) {
- break;
- }
-
- StringBuilder sb = new StringBuilder();
-                         sb.append("Only one meta consumer can be run in one process,");
-                         sb.append(" but there are multiple spout consumers with the same topic@groupid meta, so the second one ");
-                         sb.append(id).append(" does nothing ");
- LOG.info(sb.toString());
- }
- }
- }).start();
- }
-
- LOG.info("Successfully init " + id);
- }
-
-
- @Override
- public void close() {
- if (consumer != null && active == true) {
- active = false;
- consumer.shutdown();
-
- }
- }
-
-
- @Override
- public void activate() {
- if (consumer != null && active == false) {
- active = true;
- consumer.start();
- }
-
- }
-
-
- @Override
- public void deactivate() {
- if (consumer != null && active == true) {
- active = false;
- consumer.shutdown();
- }
- }
-
-
- public void sendTuple(OnsTuple OnsTuple) {
- OnsTuple.updateEmitMs();
- collector.emit(new Values(OnsTuple), OnsTuple.getCreateMs());
- }
-
-
- @Override
- public void nextTuple() {
- OnsTuple OnsTuple = null;
- try {
- OnsTuple = sendingQueue.take();
- }
- catch (InterruptedException e) {
- }
-
- if (OnsTuple == null) {
- return;
- }
-
- sendTuple(OnsTuple);
-
- }
-
-
- @Deprecated
- public void ack(Object msgId) {
-         LOG.warn("Shouldn't reach this function");
- }
-
-
- @Deprecated
- public void fail(Object msgId) {
-         LOG.warn("Shouldn't reach this function");
- }
-
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("OnsTuple"));
- }
-
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-
- @Override
- public void fail(Object msgId, List<Object> values) {
- OnsTuple OnsTuple = (OnsTuple) values.get(0);
- AtomicInteger failTimes = OnsTuple.getFailureTimes();
-
- int failNum = failTimes.incrementAndGet();
- if (failNum > maxFailTimes) {
- LOG.warn("Message " + OnsTuple.getMessage().getMsgID() + " fail times " + failNum);
- finishTuple(OnsTuple);
- return;
- }
-
- if (flowControl) {
- sendingQueue.offer(OnsTuple);
- }
- else {
- sendTuple(OnsTuple);
- }
- }
-
-
- public void finishTuple(OnsTuple OnsTuple) {
- waithHistogram.update(OnsTuple.getEmitMs() - OnsTuple.getCreateMs());
- processHistogram.update(System.currentTimeMillis() - OnsTuple.getEmitMs());
- OnsTuple.done();
- }
-
-
- @Override
- public void ack(Object msgId, List<Object> values) {
- OnsTuple OnsTuple = (OnsTuple) values.get(0);
- finishTuple(OnsTuple);
- }
-
-
- public Consumer getConsumer() {
- return consumer;
- }
-
-
- @Override
- public Action consume(Message message, ConsumeContext context) {
- try {
- OnsTuple OnsTuple = new OnsTuple(message);
-
- if (flowControl) {
- sendingQueue.offer(OnsTuple);
- }
- else {
- sendTuple(OnsTuple);
- }
-
- if (autoAck) {
- return Action.CommitMessage;
- }
- else {
- OnsTuple.waitFinish();
- if (OnsTuple.isSuccess() == true) {
- return Action.CommitMessage;
- }
- else {
- return Action.ReconsumeLater;
- }
- }
-
- }
- catch (Exception e) {
- LOG.error("Failed to emit " + id, e);
- return Action.ReconsumeLater;
- }
- }
-}
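One detail of the removed spout worth keeping in mind is its fail handling: fail(Object, List) increments the tuple's failure counter and re-queues or re-emits it until a cap (maxFailTimes, default 5) is reached, after which the tuple is finished and the message is effectively dropped so the blocked ONS listener is released. A tiny sketch of that retry-with-cap decision is shown below; the class and method names are illustrative only.

import java.util.concurrent.atomic.AtomicInteger;

public class RetryCapDemo {

    // Mirrors the spout's fail() logic: retry until the counter exceeds the cap, then drop.
    static boolean shouldRetry(AtomicInteger failureTimes, long maxFailTimes) {
        return failureTimes.incrementAndGet() <= maxFailTimes;
    }

    public static void main(String[] args) {
        AtomicInteger failures = new AtomicInteger(0);
        long maxFailTimes = 5;   // default used by the removed spout
        for (int attempt = 1; attempt <= 7; attempt++) {
            boolean retry = shouldRetry(failures, maxFailTimes);
            System.out.println("attempt " + attempt + " -> " + (retry ? "re-emit" : "drop"));
        }
    }
}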
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerBolt.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerBolt.java
deleted file mode 100644
index 2a65e54..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerBolt.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package com.alibaba.jstorm.ons.producer;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-
-import com.alibaba.jstorm.ons.OnsTuple;
-import com.alibaba.jstorm.utils.RunCounter;
-import com.aliyun.openservices.ons.api.Message;
-import com.aliyun.openservices.ons.api.Producer;
-import com.aliyun.openservices.ons.api.SendResult;
-import org.apache.log4j.Logger;
-
-import java.util.Map;
-
-
-public class ProducerBolt implements IRichBolt {
-
- private static final long serialVersionUID = 2495121976857546346L;
-
- private static final Logger LOG = Logger.getLogger(ProducerBolt.class);
-
- protected OutputCollector collector;
- protected ProducerConfig producerConfig;
- protected Producer producer;
- protected RunCounter runCounter;
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- this.collector = collector;
- this.runCounter = new RunCounter(ProducerBolt.class);
- this.producerConfig = new ProducerConfig(stormConf);
- try {
- this.producer = ProducerFactory.mkInstance(producerConfig);
- } catch (Exception e) {
- // TODO Auto-generated catch block
- throw new RuntimeException(e);
- }
-
- }
-
- public void execute(Tuple tuple) {
- // TODO Auto-generated method stub
- OnsTuple msgTuple = (OnsTuple)tuple.getValue(0);
- long before = System.currentTimeMillis();
- SendResult sendResult = null;
- try {
- Message msg = new Message(
- producerConfig.getTopic(),
- producerConfig.getSubExpress(),
- //Message Body
-             // Any binary data; ONS does not inspect the body, so the Producer and Consumer must agree on a consistent serialization/deserialization format
- msgTuple.getMessage().getBody());
-
-         // Set the business key of the message; keep it globally unique where possible,
-         // so that if a message is not received it can be looked up in the ONS Console and re-sent.
-         // Note: leaving the key unset does not affect normal sending and receiving.
- if (msgTuple.getMessage().getKey() != null) {
- msg.setKey(msgTuple.getMessage().getKey());
- }
-         // Send the message; if no exception is thrown, the send succeeded
- sendResult = producer.send(msg);
-
- LOG.info("Success send msg of " + msgTuple.getMessage().getMsgID());
- runCounter.count(System.currentTimeMillis() - before);
- } catch (Exception e) {
- LOG.error("Failed to send message, SendResult:" + sendResult + "\n", e);
- runCounter.count(System.currentTimeMillis() - before);
- collector.fail(tuple);
- return ;
- //throw new FailedException(e);
- }
-
- collector.ack(tuple);
- }
-
- public void cleanup() {
- // TODO Auto-generated method stub
- ProducerFactory.rmInstance(producerConfig.getProducerId());
- producer = null;
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- // TODO Auto-generated method stub
-
- }
-
- public Map<String, Object> getComponentConfiguration() {
- // TODO Auto-generated method stub
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerConfig.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerConfig.java
deleted file mode 100644
index 3ac7fb6..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerConfig.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package com.alibaba.jstorm.ons.producer;
-
-import java.util.Map;
-
-import com.alibaba.jstorm.ons.OnsConfig;
-import com.aliyun.openservices.ons.api.PropertyKeyConst;
-
-public class ProducerConfig extends OnsConfig{
-
- private static final long serialVersionUID = 1532254745626913230L;
-
- private final String producerId ;
-
- public ProducerConfig(Map conf) {
- super(conf);
-
- producerId = (String)conf.get(PropertyKeyConst.ProducerId);
- if (producerId == null) {
- throw new RuntimeException(PropertyKeyConst.ProducerId + " hasn't been set");
- }
-
-
- }
-
- public String getProducerId() {
- return producerId;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerFactory.java b/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerFactory.java
deleted file mode 100644
index 203805d..0000000
--- a/jstorm-utility/ons/src/main/java/com/alibaba/jstorm/ons/producer/ProducerFactory.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package com.alibaba.jstorm.ons.producer;
-
-import com.aliyun.openservices.ons.api.Consumer;
-import com.aliyun.openservices.ons.api.MessageListener;
-import com.aliyun.openservices.ons.api.ONSFactory;
-import com.aliyun.openservices.ons.api.Producer;
-import com.aliyun.openservices.ons.api.PropertyKeyConst;
-import org.apache.log4j.Logger;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-public class ProducerFactory {
-
- private static final Logger LOG = Logger.getLogger(ProducerFactory.class);
-
- public static Map<String, Producer> producers = new HashMap<String, Producer>();
-
- public static synchronized Producer mkInstance(ProducerConfig producerConfig) throws Exception{
-
- String producerId = producerConfig.getProducerId();
- Producer producer = producers.get(producerId);
- if (producer != null) {
-
- LOG.info("Producer of " + producerId + " has been created, don't recreate it ");
- return producer;
- }
-
- Properties properties = new Properties();
- properties.put(PropertyKeyConst.ProducerId, producerConfig.getProducerId());
- properties.put(PropertyKeyConst.AccessKey, producerConfig.getAccessKey());
- properties.put(PropertyKeyConst.SecretKey, producerConfig.getSecretKey());
-
- producer = ONSFactory.createProducer(properties);
- producer.start();
-
-
- producers.put(producerId, producer);
- LOG.info("Successfully create " + producerId + " producer");
-
- return producer;
-
- }
-
- public static synchronized void rmInstance(String producerId) {
- Producer producer = producers.remove(producerId);
- if (producer == null) {
-
- LOG.info("Producer of " + producerId + " has already been shutdown ");
- return ;
- }
-
- producer.shutdown();
- LOG.info("Producer of " + producerId + " has been shutdown ");
- return ;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/ons/test/main/resources/metaspout.yaml
----------------------------------------------------------------------
diff --git a/jstorm-utility/ons/test/main/resources/metaspout.yaml b/jstorm-utility/ons/test/main/resources/metaspout.yaml
deleted file mode 100755
index f007772..0000000
--- a/jstorm-utility/ons/test/main/resources/metaspout.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-
-#Meta Client Configuration
-# Please refer to MetaClientConfig for details of every setting
-meta.topic: "bbl_user"
-meta.consumer.group: "bbl_user"
-meta.subexpress: "*"
-#meta.nameserver: ""
-#meta.pull.interval.ms: 0
-#meta.max.fail.times: 5
-#meta.internal.queue.size: 256
-#meta.batch.send.msg.size: 16
-#meta.batch.pull.msg.size: 32
-#meta.pull.thread.num: 4
-#meta.spout.auto.ack: false
-#meta.spout.flow.contro: true
-#yyyyMMddHHmmss
-meta.consumer.start.timestamp: "20141011000000"
-#meta.extra.properties:
-
-topology.name: test_meta_spout
-topology.version: 1.0.0
-topology.workers: 5
-topology.max.spout.pending: 10
-topology.acker.executors: 1
-
-topology.debug: false
-topology.debug.recv.tuple: false
-storm.cluster.mode: local
-
-topology.spout.parallel: 2
-topology.writer.parallel: 1
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/rocket-mq
----------------------------------------------------------------------
diff --git a/jstorm-utility/rocket-mq b/jstorm-utility/rocket-mq
deleted file mode 160000
index 372e9d8..0000000
--- a/jstorm-utility/rocket-mq
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit 372e9d87667272e6da7b5501d6d7dd2bad41ce6f
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/.gitignore
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/.gitignore b/jstorm-utility/topology-monitor/.gitignore
deleted file mode 100755
index 2bf102a..0000000
--- a/jstorm-utility/topology-monitor/.gitignore
+++ /dev/null
@@ -1,13 +0,0 @@
-# Lines that start with '#' are comments.
-*~
-*.diff
-*#
-.classpath
-.project
-.settings
-bin
-*.class
-.eclipse
-target
-*.iml
-*.versionsBackup
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/README.md
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/README.md b/jstorm-utility/topology-monitor/README.md
deleted file mode 100755
index ad4f892..0000000
--- a/jstorm-utility/topology-monitor/README.md
+++ /dev/null
@@ -1,2 +0,0 @@
-storm-util
-==========
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/pom.xml b/jstorm-utility/topology-monitor/pom.xml
deleted file mode 100755
index 714b3e3..0000000
--- a/jstorm-utility/topology-monitor/pom.xml
+++ /dev/null
@@ -1,110 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
- <modelVersion>4.0.0</modelVersion>
- <groupId>com.dianping.cosmos</groupId>
- <artifactId>storm-util</artifactId>
- <packaging>jar</packaging>
- <version>1.3-SNAPSHOT</version>
- <name>storm-util</name>
- <url>http://maven.apache.org</url>
- <dependencies>
- <dependency>
- <groupId>com.dianping</groupId>
- <artifactId>blackhole-consumer</artifactId>
- <version>2.0.5</version>
- </dependency>
- <dependency>
- <groupId>com.dianping</groupId>
- <artifactId>blackhole-common</artifactId>
- <version>2.0.5</version>
- </dependency>
- <dependency>
- <groupId>redis.clients</groupId>
- <artifactId>jedis</artifactId>
- <version>2.4.2</version>
- </dependency>
- <dependency>
- <groupId>org.apache.storm</groupId>
- <artifactId>storm-core</artifactId>
- <version>0.9.1-incubating</version>
- <exclusions>
- <exclusion>
- <groupId>ch.qos.logback</groupId>
- <artifactId>logback-classic</artifactId>
- </exclusion>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>log4j-over-slf4j</artifactId>
- </exclusion>
- </exclusions>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.dianping.puma</groupId>
- <artifactId>puma-client</artifactId>
- <version>0.1.1</version>
- </dependency>
- <dependency>
- <groupId>log4j</groupId>
- <artifactId>log4j</artifactId>
- <version>1.2.14</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- <version>1.5.11</version>
- </dependency>
- <dependency>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-api</artifactId>
- <version>1.5.11</version>
- </dependency>
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>3.8.1</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.dianping.cat</groupId>
- <artifactId>cat-core</artifactId>
- <version>1.0.5</version>
- </dependency>
- </dependencies>
- <build>
- <plugins>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-source-plugin</artifactId>
- <version>2.2</version>
- <executions>
- <execution>
- <goals>
- <goal>jar-no-fork</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- <distributionManagement>
- <repository>
- <id>dianping.repo</id>
- <name>Dian Ping internal repository for released artifacts</name>
- <url>http://mvn.dianpingoa.com/dianping-releases</url>
- </repository>
- <snapshotRepository>
- <id>dianping.repo.snapshots</id>
- <name>mvn.dianpingoa.com-snapshots</name>
- <url>http://mvn.dianpingoa.com/dianping-snapshots</url>
- </snapshotRepository>
- </distributionManagement>
-<repositories>
- <repository>
- <id>clojars</id>
- <url>http://clojars.org/repo/</url>
- </repository>
-</repositories>
-
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeBlockingQueueSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeBlockingQueueSpout.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeBlockingQueueSpout.java
deleted file mode 100755
index 50e870a..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeBlockingQueueSpout.java
+++ /dev/null
@@ -1,114 +0,0 @@
-package com.dianping.cosmos;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.metric.api.CountMetric;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-
-import com.dianping.cosmos.util.CatMetricUtil;
-import com.dianping.cosmos.util.Constants;
-import com.dianping.lion.client.LionException;
-import com.dp.blackhole.consumer.Consumer;
-import com.dp.blackhole.consumer.ConsumerConfig;
-import com.dp.blackhole.consumer.MessageStream;
-
-@SuppressWarnings({"rawtypes", "unchecked"})
-public class BlackholeBlockingQueueSpout implements IRichSpout {
- private static final long serialVersionUID = 386827585122587595L;
- public static final Logger LOG = LoggerFactory.getLogger(BlackholeBlockingQueueSpout.class);
- private SpoutOutputCollector collector;
- private String topic;
- private String group;
- private MessageStream stream;
- private Consumer consumer;
- private MessageFetcher fetchThread;
- private int warnningStep = 0;
- private transient CountMetric _spoutMetric;
-
- public BlackholeBlockingQueueSpout(String topic, String group) {
- this.topic = topic;
- this.group = group;
- }
-
- @Override
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector _collector) {
- collector = _collector;
- _spoutMetric = new CountMetric();
- context.registerMetric(CatMetricUtil.getSpoutMetricName(topic, group),
- _spoutMetric, Constants.EMIT_FREQUENCY_IN_SECONDS);
-
- ConsumerConfig config = new ConsumerConfig();
- try {
- consumer = new Consumer(topic, group, config);
- } catch (LionException e) {
- throw new RuntimeException(e);
- }
- consumer.start();
- stream = consumer.getStream();
-
- fetchThread = new MessageFetcher(stream);
- new Thread(fetchThread).start();
- }
-
- @Override
- public void close() {
- fetchThread.shutdown();
- }
-
- @Override
- public void activate() {
-
- }
-
- @Override
- public void deactivate() {
- }
-
- @Override
- public void nextTuple() {
- String message = fetchThread.pollMessage();
- if (message != null) {
- collector.emit(topic, new Values(message));
- _spoutMetric.incr();
- } else {
- Utils.sleep(100);
- warnningStep++;
- if (warnningStep % 100 == 0) {
- LOG.warn("Queue is empty, cannot poll message.");
- }
- }
- }
-
- @Override
- public void ack(Object msgId) {
- LOG.debug("ack: " + msgId);
-
- }
-
- @Override
- public void fail(Object msgId) {
- LOG.info("fail: " + msgId);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(topic, new Fields("event"));
- }
-
- @Override
- public Map getComponentConfiguration(){
- Map<String, Object> conf = new HashMap<String, Object>();
- return conf;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeSpout.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeSpout.java
deleted file mode 100755
index 86dfffb..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/BlackholeSpout.java
+++ /dev/null
@@ -1,101 +0,0 @@
-package com.dianping.cosmos;
-
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.metric.api.CountMetric;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-
-import com.dianping.cosmos.util.CatMetricUtil;
-import com.dianping.cosmos.util.Constants;
-import com.dianping.lion.client.LionException;
-import com.dp.blackhole.consumer.Consumer;
-import com.dp.blackhole.consumer.ConsumerConfig;
-import com.dp.blackhole.consumer.MessageStream;
-
-@SuppressWarnings({"rawtypes"})
-public class BlackholeSpout implements IRichSpout {
- private static final long serialVersionUID = 1L;
-
- public static final Logger LOG = LoggerFactory.getLogger(BlackholeSpout.class);
-
- private SpoutOutputCollector collector;
- private String topic;
- private String group;
- private MessageStream stream;
- private Consumer consumer;
- private transient CountMetric _spoutMetric;
-
- public BlackholeSpout(String topic, String group) {
- this.topic = topic;
- this.group = group;
- }
-
- @Override
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector _collector) {
- collector = _collector;
- _spoutMetric = new CountMetric();
- context.registerMetric(CatMetricUtil.getSpoutMetricName(topic, group),
- _spoutMetric, Constants.EMIT_FREQUENCY_IN_SECONDS);
-
- ConsumerConfig config = new ConsumerConfig();
- try {
- consumer = new Consumer(topic, group, config);
- } catch (LionException e) {
- throw new RuntimeException(e);
- }
- consumer.start();
- stream = consumer.getStream();
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void activate() {
- }
-
- @Override
- public void deactivate() {
- }
-
- @Override
- public void nextTuple() {
- for (String message : stream) {
- collector.emit(topic, new Values(message));
- _spoutMetric.incr();
- }
- }
-
- @Override
- public void ack(Object msgId) {
- LOG.debug("ack: " + msgId);
-
- }
-
- @Override
- public void fail(Object msgId) {
- LOG.info("fail: " + msgId);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(topic, new Fields("event"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- // TODO Auto-generated method stub
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/MessageFetcher.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/MessageFetcher.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/MessageFetcher.java
deleted file mode 100755
index b379dd3..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/MessageFetcher.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package com.dianping.cosmos;
-
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.dp.blackhole.consumer.MessageStream;
-
-public class MessageFetcher implements Runnable {
- public static final Logger LOG = LoggerFactory.getLogger(MessageFetcher.class);
- private final int MAX_QUEUE_SIZE = 1000;
- private final int TIME_OUT = 5000;
-
- private BlockingQueue<String> emitQueue;
- private MessageStream stream;
-
- private volatile boolean running;
- public MessageFetcher(MessageStream stream) {
- this.running = true;
- this.stream = stream;
- this.emitQueue = new LinkedBlockingQueue<String>(MAX_QUEUE_SIZE);
- }
-
- @Override
- public void run() {
- while (running) {
- for (String message : stream) {
- try {
- while(!emitQueue.offer(message, TIME_OUT, TimeUnit.MILLISECONDS)) {
- LOG.error("Queue is full, cannot offer message.");
- }
- } catch (InterruptedException e) {
- LOG.error("Thread Interrupted");
- running = false;
- }
- }
- }
- }
-
- public String pollMessage() {
- return emitQueue.poll();
- }
-
- public void shutdown() {
- this.running = false;
- }
-}
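The removed MessageFetcher decouples the blocking Blackhole MessageStream iterator from the spout's nextTuple(): a background thread offers each message into a bounded LinkedBlockingQueue (retrying with a timeout when full), while the spout polls non-blockingly and backs off when the queue is empty. A self-contained sketch of that bounded producer/consumer bridge follows; the class name and the synthetic "event-N" messages are illustrative assumptions, not part of the original code.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class BoundedBridgeDemo {

    public static void main(String[] args) throws Exception {
        final BlockingQueue<String> queue = new LinkedBlockingQueue<String>(1000);

        // Fetcher side: push into the bounded queue, blocking with a timeout when full.
        Thread fetcher = new Thread(new Runnable() {
            public void run() {
                for (int i = 0; i < 10; i++) {
                    String message = "event-" + i;   // stands in for iterating the stream
                    try {
                        while (!queue.offer(message, 5000, TimeUnit.MILLISECONDS)) {
                            System.err.println("Queue is full, cannot offer message.");
                        }
                    } catch (InterruptedException e) {
                        return;                      // shut down on interrupt
                    }
                }
            }
        });
        fetcher.start();

        // Spout side: poll() returns null when nothing is buffered, so nextTuple()
        // can back off instead of blocking the executor thread.
        int received = 0;
        while (received < 10) {
            String message = queue.poll();
            if (message != null) {
                System.out.println("emit " + message);
                received++;
            } else {
                Thread.sleep(100);
            }
        }
        fetcher.join();
    }
}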
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/PumaSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/PumaSpout.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/PumaSpout.java
deleted file mode 100755
index fb1ff2a..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/PumaSpout.java
+++ /dev/null
@@ -1,194 +0,0 @@
-package com.dianping.cosmos;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.BlockingQueue;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.LinkedBlockingQueue;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-
-import com.dianping.puma.api.ConfigurationBuilder;
-import com.dianping.puma.api.EventListener;
-import com.dianping.puma.api.PumaClient;
-import com.dianping.puma.core.event.ChangedEvent;
-import com.dianping.puma.core.event.RowChangedEvent;
-
-
-public class PumaSpout implements IRichSpout{
- public static final Logger LOG = LoggerFactory.getLogger(PumaSpout.class);
-
- private SpoutOutputCollector collector;
- private PumaEventListener listener;
- private BlockingQueue<RowChangedEvent> receiveQueue;
- private Map<String, RowChangedEvent> waitingForAck;
-
- private Map<String, String[]> watchTables;
- private String pumaHost;
- private int pumaPort;
- private String pumaName;
- private String pumaTarget;
- private int pumaServerId;
- private String pumaSeqFileBase;
-
- public PumaSpout(String host, int port, String name, String target, HashMap<String, String[]> tables) {
- this(host, port, name, target, tables, null);
- }
-
- public PumaSpout(String host, int port, String name, String target, HashMap<String, String[]> tables, String seqFileBase) {
- this(host, port, name, target, tables, 9999, seqFileBase);
- }
-
- public PumaSpout(String host, int port, String name, String target, HashMap<String, String[]> tables, int serverId, String seqFileBase) {
- pumaHost = host;
- pumaPort = port;
- pumaName = name;
- pumaTarget = target;
- watchTables = tables;
- pumaServerId = serverId;
- pumaSeqFileBase = seqFileBase;
- }
-
- protected static String getMsgId(RowChangedEvent e) {
- return e.getBinlogServerId() + "." + e.getBinlog() + "." + e.getBinlogPos();
- }
-
- protected static String getStreamId(RowChangedEvent e) {
- return e.getDatabase() + "." + e.getTable();
- }
-
- class PumaEventListener implements EventListener {
-
- @Override
- public void onEvent(ChangedEvent event) throws Exception {
- if (!(event instanceof RowChangedEvent)) {
- LOG.error("received event " + event +" which is not a RowChangedEvent");
- return;
- }
- RowChangedEvent e = (RowChangedEvent)event;
- receiveQueue.add(e);
- }
-
- @Override
- public boolean onException(ChangedEvent event, Exception e) {
- return false;
- }
-
- @Override
- public void onConnectException(Exception e) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void onConnected() {
- LOG.info("pumaspout connected");
- }
-
- @Override
- public void onSkipEvent(ChangedEvent event) {
- // TODO Auto-generated method stub
-
- }
-
- }
-
- @Override
- public void ack(Object msgId) {
- LOG.debug("ack: " + msgId);
- waitingForAck.remove(msgId);
- }
-
- @Override
- public void activate() {
- }
-
- @Override
- public void close() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void deactivate() {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public void fail(Object msgId) {
- LOG.debug("fail: " + msgId + ", resend event");
- RowChangedEvent event = waitingForAck.get(msgId);
- collector.emit(getStreamId(event), new Values(event), getMsgId(event));
- }
-
- @Override
- public void nextTuple() {
- RowChangedEvent event = null;
- try {
- event = receiveQueue.take();
- } catch (InterruptedException e) {
- return;
- }
-
- String msgId = getMsgId(event);
- collector.emit(getStreamId(event), new Values(event), msgId);
- waitingForAck.put(msgId, event);
- }
-
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector _collector) {
- collector = _collector;
- receiveQueue = new LinkedBlockingQueue<RowChangedEvent>();
- waitingForAck = new ConcurrentHashMap<String, RowChangedEvent>();
-
- ConfigurationBuilder configBuilder = new ConfigurationBuilder();
- configBuilder.ddl(false);
- configBuilder.dml(true);
- configBuilder.transaction(false);
- if (pumaSeqFileBase != null) {
- configBuilder.seqFileBase(pumaSeqFileBase);
- }
- configBuilder.host(pumaHost);
- configBuilder.port(pumaPort);
- configBuilder.serverId(pumaServerId);
- configBuilder.name(pumaName);
- for (Entry<String, String[]> e : watchTables.entrySet()) {
- String db = e.getKey();
- String[] tabs = e.getValue();
- configBuilder.tables(db, tabs);
- }
- configBuilder.target(pumaTarget);
- PumaClient pc = new PumaClient(configBuilder.build());
-
- listener = new PumaEventListener();
- pc.register(listener);
- pc.start();
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- for (Entry<String, String[]> entry : watchTables.entrySet()) {
- String db = entry.getKey();
- for (String table : entry.getValue()) {
- String dbTable = db + "." + table;
- declarer.declareStream(dbTable, new Fields("event"));
- }
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- // TODO Auto-generated method stub
- return null;
- }
-
-}
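For reference, a minimal sketch of how the PumaSpout above is wired into a topology: it declares one output stream per watched "db.table", so downstream bolts subscribe by that stream id. The host, port, client name, target, and the orders_db.orders table below are hypothetical placeholders, and the sketch assumes the backtype.storm API used throughout these sources.

import java.util.HashMap;

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Tuple;

public class PumaSpoutTopologySketch {

    /** Terminal bolt that just prints each change event it receives. */
    public static class PrintBolt extends BaseBasicBolt {
        @Override
        public void execute(Tuple input, BasicOutputCollector collector) {
            System.out.println("change event: " + input.getValue(0));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // terminal bolt, no downstream streams
        }
    }

    public static void main(String[] args) throws Exception {
        // Hypothetical database and table to watch; the spout declares a
        // stream named "orders_db.orders" for this entry.
        HashMap<String, String[]> tables = new HashMap<String, String[]>();
        tables.put("orders_db", new String[] { "orders" });

        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("puma",
                new PumaSpout("puma-host", 7862, "storm-client", "orders-target", tables), 1);
        builder.setBolt("print", new PrintBolt(), 1)
                .shuffleGrouping("puma", "orders_db.orders");

        Config conf = new Config();
        conf.setNumWorkers(1);
        new LocalCluster().submitTopology("puma-example", conf, builder.createTopology());
    }
}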
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/RedisSinkBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/RedisSinkBolt.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/RedisSinkBolt.java
deleted file mode 100755
index d2e02e1..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/RedisSinkBolt.java
+++ /dev/null
@@ -1,167 +0,0 @@
-package com.dianping.cosmos;
-
-import java.util.Map;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.commons.pool2.impl.GenericObjectPoolConfig;
-
-import redis.clients.jedis.Jedis;
-import redis.clients.jedis.JedisPool;
-import redis.clients.jedis.exceptions.JedisConnectionException;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-
-public class RedisSinkBolt implements IRichBolt {
- private final Log LOG = LogFactory.getLog(RedisSinkBolt.class);
- private OutputCollector collector;
- private JedisPool pool;
- private Updater updater;
-
- private String redisHost;
- private int redisPort;
- private int timeout;
- private int retryLimit;
-
- public RedisSinkBolt(String redisHost, int redisPort) {
- this(redisHost, redisPort, 50, 3);
- }
-
- public RedisSinkBolt(String redisHost, int redisPort, int retryLimit) {
- this(redisHost, redisPort, 50, retryLimit);
- }
-
- public RedisSinkBolt(String redisHost, int redisPort, int timeout, int retryLimit) {
- this.redisHost = redisHost;
- this.redisPort = redisPort;
- this.timeout = timeout;
- this.retryLimit = retryLimit;
- }
-
- public void setUpdater(Updater updater) {
- this.updater = updater;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- OutputCollector collector) {
- this.collector = collector;
-
- GenericObjectPoolConfig pconf = new GenericObjectPoolConfig();
- pconf.setMaxWaitMillis(2000);
- pconf.setMaxTotal(1000);
- pconf.setTestOnBorrow(false);
- pconf.setTestOnReturn(false);
- pconf.setTestWhileIdle(true);
- pconf.setMinEvictableIdleTimeMillis(120000);
- pconf.setTimeBetweenEvictionRunsMillis(60000);
- pconf.setNumTestsPerEvictionRun(-1);
-
- pool = new JedisPool(pconf, redisHost, redisPort, timeout);
- }
-
- private byte[] retryGet(byte[] key) {
- int retry = 0;
- byte[] ret;
- while (true) {
- Jedis jedis = null;
- try {
- jedis = pool.getResource();
- ret = jedis.get(key);
- return ret;
- } catch (JedisConnectionException e) {
- if (jedis != null) {
- pool.returnBrokenResource(jedis);
- jedis = null;
- }
- if (retry > retryLimit) {
- throw e;
- }
- retry++;
- } finally {
- if (jedis != null) {
- pool.returnResource(jedis);
- }
- }
- }
- }
-
- private String retrySet(byte[] key, byte[] value) {
- int retry = 0;
- String ret;
- while (true) {
- Jedis jedis = null;
- try {
- jedis = pool.getResource();
- ret = jedis.set(key, value);
- return ret;
- } catch (JedisConnectionException e) {
- if (jedis != null) {
- pool.returnBrokenResource(jedis);
- jedis = null;
- }
- if (retry > retryLimit) {
- throw e;
- }
- retry++;
- } finally {
- if (jedis != null) {
- pool.returnResource(jedis);
- }
- }
-
- }
- }
-
- @Override
- public void execute(Tuple input) {
- byte[] key = input.getBinary(0);
- byte[] value = input.getBinary(1);
-
- if (key == null || value == null) {
- collector.ack(input);
- return;
- }
-
- try {
- if (updater != null) {
- byte[] oldValue = retryGet(key);
- byte[] newValue = updater.update(oldValue, value);
- if (newValue == null) {
- collector.ack(input);
- return;
- }
- retrySet(key, newValue);
- collector.ack(input);
- return;
- }
-
- retrySet(key, value);
- collector.ack(input);
- } catch (JedisConnectionException e) {
- LOG.warn("JedisConnectionException catched ", e);
- collector.fail(input);
- }
- }
-
- @Override
- public void cleanup() {
- pool.destroy();
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- // TODO Auto-generated method stub
-
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- // TODO Auto-generated method stub
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/Updater.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/Updater.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/Updater.java
deleted file mode 100755
index 04eedf2..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/Updater.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.dianping.cosmos;
-
-import java.io.Serializable;
-
-public interface Updater extends Serializable {
-
- byte[] update(byte[] oldValue, byte[] newValue);
-
-}
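RedisSinkBolt drives this interface as a read-modify-write hook: it fetches the current value for the key, passes it to update() together with the incoming value, writes back whatever update() returns, and acks the tuple without writing when update() returns null. Below is a minimal sketch of an Updater, assuming values are UTF-8 encoded longs used as running counters; the counter semantics are an illustration only and not part of these sources.

import java.nio.charset.Charset;

public class CounterUpdater implements Updater {
    private static final long serialVersionUID = 1L;
    private static final Charset UTF8 = Charset.forName("UTF-8");

    @Override
    public byte[] update(byte[] oldValue, byte[] newValue) {
        // Treat a missing key as zero and accumulate the incoming value.
        long current = oldValue == null ? 0L : Long.parseLong(new String(oldValue, UTF8));
        long increment = Long.parseLong(new String(newValue, UTF8));
        // Returning null here would make RedisSinkBolt ack without writing;
        // this sketch always writes the new running total back.
        return String.valueOf(current + increment).getBytes(UTF8);
    }
}

It is attached before the bolt is added to the topology, e.g. RedisSinkBolt bolt = new RedisSinkBolt("redis-host", 6379); bolt.setUpdater(new CounterUpdater()); where the host and port are placeholders.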
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/metric/CatMetricsConsumer.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/metric/CatMetricsConsumer.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/metric/CatMetricsConsumer.java
deleted file mode 100755
index 1e2ff06..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/metric/CatMetricsConsumer.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package com.dianping.cosmos.metric;
-
-import java.util.Collection;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.metric.api.IMetricsConsumer;
-import backtype.storm.task.IErrorReporter;
-import backtype.storm.task.TopologyContext;
-
-import com.dianping.cosmos.monitor.HttpCatClient;
-import com.dianping.cosmos.util.CatMetricUtil;
-
-/**
- * Listens for all metrics, dumps them to cat
- *
- * To use, add this to your topology's configuration:
- * conf.registerMetricsConsumer(com.dianping.cosmos.metric.CatMetricsConsumer.class, 1);
- *
- * Or edit the storm.yaml config file:
- *
- * topology.metrics.consumer.register:
- * - class: "com.dianping.cosmos.metric.CatSpoutMetricsConsumer"
- * parallelism.hint: 1
- *
- */
-@SuppressWarnings("rawtypes")
-public class CatMetricsConsumer implements IMetricsConsumer {
- private static final Logger LOGGER = LoggerFactory.getLogger(CatMetricsConsumer.class);
- private String stormId;
-
- @Override
- public void prepare(Map stormConf, Object registrationArgument,
- TopologyContext context, IErrorReporter errorReporter) {
- stormId = context.getStormId();
- }
-
-
- @Override
- public void handleDataPoints(TaskInfo taskInfo, Collection<DataPoint> dataPoints) {
- for (DataPoint p : dataPoints) {
- try{
- if(CatMetricUtil.isCatMetric(p.name)){
- HttpCatClient.sendMetric(getTopologyName(),
- CatMetricUtil.getCatMetricKey(p.name), "sum", String.valueOf(p.value));
- }
- }
- catch(Exception e){
- LOGGER.warn("send metirc 2 cat error.", e);
- }
- }
- }
-
- private String getTopologyName(){
- return StringUtils.substringBefore(stormId, "-");
- }
-
- @Override
- public void cleanup() {
- }
-
- public static void main(String[] args){
- CatMetricsConsumer c = new CatMetricsConsumer();
- c.stormId = "HippoUV_25-15-1410857734";
- System.out.println(c.getTopologyName());
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpCatClient.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpCatClient.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpCatClient.java
deleted file mode 100755
index 9fe6987..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpCatClient.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.dianping.cosmos.monitor;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class HttpCatClient {
- private static final Logger LOGGER = LoggerFactory.getLogger(HttpCatClient.class);
-
- private HttpCatClient(){
- }
-
- private static HttpClientService httClientSerivce = new HttpClientService();
-
- private static List<String> CAT_SERVERS = new ArrayList<String>();
- // the addresses of the cat servers to access are initialized below
- private static AtomicInteger CURRENT_SERVER_INDEX = new AtomicInteger(0);
-
- static{
- CAT_SERVERS.add("http://cat02.nh:8080/");
- CAT_SERVERS.add("http://cat03.nh:8080/");
- CAT_SERVERS.add("http://cat04.nh:8080/");
- CAT_SERVERS.add("http://cat05.nh:8080/");
- CAT_SERVERS.add("http://cat06.nh:8080/");
- }
-
- public static void sendMetric(String domain, String key, String op, String value){
- String server = getServer();
- try{
- StringBuilder request = new StringBuilder();
- request.append(server);
- request.append("cat/r/monitor?timestamp=");
- request.append(System.currentTimeMillis());
- request.append("&group=Storm&domain=");
- request.append(domain);
- request.append("&key=");
- request.append(key);
- request.append("&op=");
- request.append(op);
- request.append("&" + op +"=");
- request.append(value);
- httClientSerivce.get(request.toString());
- }
- catch(Exception e){
- CURRENT_SERVER_INDEX.getAndIncrement();
- LOGGER.error("send to cat " + server + " error.", e);
- }
- }
-
- private static String getServer(){
- int index = CURRENT_SERVER_INDEX.get() % CAT_SERVERS.size();
- return CAT_SERVERS.get(index);
- }
-}
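A small usage sketch of the client above; the domain, key, and value are hypothetical, and the commented URL shows roughly how sendMetric assembles the GET request against one of the hard-coded cat servers.

public class HttpCatClientSketch {
    public static void main(String[] args) {
        // Hypothetical domain/key/value; sendMetric issues a GET roughly like
        // http://cat02.nh:8080/cat/r/monitor?timestamp=...&group=Storm
        //        &domain=OrderTopology&key=orders-processed&op=sum&sum=42
        HttpCatClient.sendMetric("OrderTopology", "orders-processed", "sum", "42");
    }
}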
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpClientService.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpClientService.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpClientService.java
deleted file mode 100755
index 843e5da..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/HttpClientService.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package com.dianping.cosmos.monitor;
-
-import java.io.IOException;
-import java.net.URI;
-import java.net.URL;
-import java.util.List;
-
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.NameValuePair;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.entity.UrlEncodedFormEntity;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.http.client.methods.HttpPost;
-import org.apache.http.client.methods.HttpUriRequest;
-import org.apache.http.impl.client.DefaultHttpClient;
-import org.apache.http.params.CoreConnectionPNames;
-import org.apache.http.util.EntityUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * Writes metric-related information to cat.
- * @author xinchun.wang
- *
- */
-public class HttpClientService {
-
- private static final Logger LOGGER = LoggerFactory.getLogger(HttpClientService.class);
-
-// private static JSONUtil jsonUtil = JSONUtil.getInstance();
-
- protected String excuteGet(String url, boolean useURI) throws Exception {
- HttpClient httpClient = getHttpClient();
- HttpUriRequest request = getGetRequest(url, useURI);
-
- HttpResponse httpResponse = httpClient.execute(request);
-
- String response = parseResponse(url, httpResponse);
- return response;
- }
-
- protected String excutePost(String url, List<NameValuePair> nvps) throws Exception {
- HttpClient httpClient = getHttpClient();
- HttpPost httpPost = new HttpPost(url);
- httpPost.setEntity(new UrlEncodedFormEntity(nvps));
- HttpResponse httpResponse = httpClient.execute(httpPost);
- String response = parseResponse(url, httpResponse);
- return response;
- }
-
- private String parseResponse(String url, HttpResponse httpResponse)
- throws Exception, IOException {
- int status = httpResponse.getStatusLine().getStatusCode();
- if(status != 200){
- String errorMsg = "Error occurs in calling acl service: " + url + ", with status:" + status;
- throw new Exception(errorMsg);
- }
- HttpEntity entry = httpResponse.getEntity();
- String response = EntityUtils.toString(entry, "UTF-8");
- return response;
- }
-
- private HttpClient getHttpClient() {
- HttpClient httpClient = new DefaultHttpClient();
- httpClient.getParams().setParameter(CoreConnectionPNames.CONNECTION_TIMEOUT, 5000);
- httpClient.getParams().setParameter(CoreConnectionPNames.SO_TIMEOUT, 5000);
- return httpClient;
- }
-
- private HttpUriRequest getGetRequest(String url, boolean useURI) throws Exception {
- HttpUriRequest request;
- if(useURI){
- URL requestURL = new URL(url);
- URI uri = new URI(
- requestURL.getProtocol(),
- null,
- requestURL.getHost(),
- requestURL.getPort(),
- requestURL.getPath(),
- requestURL.getQuery(),
- null);
- request = new HttpGet(uri);
- }
- else{
- request = new HttpGet(url);
- }
- return request;
- }
-
-// protected boolean parseResultMap(String response, String url) throws Exception{
-// Map<?, ?> result = jsonUtil.formatJSON2Map(response);
-// if(result == null){
-// return false;
-// }
-// String code = (String)result.get("statusCode");
-// if("-1".equals(code)){
-// throw new Exception(String.valueOf(result.get("errorMsg")));
-// }
-// return true;
-// }
-
-
- public void get(String url) throws Exception{
- String response = excuteGet(url, false);
- if(response == null){
- LOGGER.error("call uri error, response is null, uri = " + url);
- }
- //parseResultMap(response, url);
- }
-
- public void getByURI(String url) throws Exception{
- String response = excuteGet(url, true);
- if(response == null){
- LOGGER.error("call uri error, response is null, uri = " + url);
- }
- //parseResultMap(response, url);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/SpoutCounter.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/SpoutCounter.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/SpoutCounter.java
deleted file mode 100755
index 4e06df3..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/SpoutCounter.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package com.dianping.cosmos.monitor;
-
-import java.util.concurrent.atomic.AtomicLong;
-
-public class SpoutCounter {
- private AtomicLong repeatCounter = new AtomicLong(0l);
- private AtomicLong tupleCounter = new AtomicLong(0l);
-
- public void incrRepeatCounter(){
- repeatCounter.incrementAndGet();
- }
-
- public long getRepeatCounter(){
- return repeatCounter.get();
- }
-
- public void incrTupleCounter(long increment){
- tupleCounter.addAndGet(increment);
- }
-
- public long getTupleCounter(){
- return tupleCounter.get();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/TopologyMonitor.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/TopologyMonitor.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/TopologyMonitor.java
deleted file mode 100755
index 88787ca..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/TopologyMonitor.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package com.dianping.cosmos.monitor;
-
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.commons.lang.StringUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.metric.api.IMetricsConsumer.DataPoint;
-import backtype.storm.metric.api.IMetricsConsumer.TaskInfo;
-
-public class TopologyMonitor {
- private static final Logger LOGGER = LoggerFactory.getLogger(TopologyMonitor.class);
-
- private static Map<Integer, SpoutCounter> spoutCounterMap = new ConcurrentHashMap<Integer, SpoutCounter>();
-
- public void monitorStatus(String stormId, TaskInfo taskInfo, DataPoint p) {
- SpoutCounter counter = spoutCounterMap.get(taskInfo.srcTaskId);
- if(counter == null){
- counter = new SpoutCounter();
- spoutCounterMap.put(taskInfo.srcTaskId, counter);
- }
- counter.incrRepeatCounter();
- String value = String.valueOf(p.value);
- long increment = Long.parseLong(value);
- counter.incrTupleCounter(increment);
- // for one continuous minute
- if(counter.getRepeatCounter() >= 12){
- // data volume is below a certain threshold
- LOGGER.info("last minute tuple count = " + counter.getTupleCounter());
- if(counter.getTupleCounter() <= 10000){
- LOGGER.error("spout has a problem, restarting topology...");
- //restartTopology(stormId);
- }
- spoutCounterMap.clear();
- }
- }
-
- /**
- * stormId: MobileUV_7-212-1409657868
- * @param stormId
- */
- public void restartTopology(String stormId){
- String currentTopology = StringUtils.substringBefore(stormId, "-");
- String topologyPrefix = StringUtils.substringBefore(currentTopology, "_");
- String topologyIndex = StringUtils.substringAfter(currentTopology, "_");
- int newIndex = Integer.parseInt(topologyIndex) + 1;
- String newTopologyName = topologyPrefix + "_" + newIndex;
- LOGGER.info("new topology name = " + newTopologyName);
- execStartCommand(newTopologyName);
- LOGGER.info("execStartCommand finish ..");
- execShutdownCommand(currentTopology);
- LOGGER.info("execShutdownCommand finish ..");
- }
-
- public void execStartCommand(String topologyName){
- Process process;
- try {
- process = Runtime.getRuntime().exec(new String[]{
- "/usr/local/storm/bin/storm",
- "jar",
- "/home/hadoop/topology/meteor-traffic-0.0.1.jar",
- "com.dianping.data.warehouse.traffic.mobile.MobileUVTopology",
- topologyName});
- process.waitFor();
- } catch (Exception e) {
- LOGGER.error("", e);
- }
- }
-
- public void execShutdownCommand(String topologyName){
- Process process;
- try {
- process = Runtime.getRuntime().exec(new String[]{
- "/usr/local/storm/bin/storm",
- "kill",
- topologyName,
- "10"});
- process.waitFor();
- } catch (Exception e) {
- LOGGER.error("", e);
- }
- }
-
- public static void main(String[] args){
- TopologyMonitor monitor = new TopologyMonitor();
- monitor.restartTopology("MobileUV_7-212-1409657868");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoBolt.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoBolt.java
deleted file mode 100755
index 2dd99bd..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoBolt.java
+++ /dev/null
@@ -1,170 +0,0 @@
-package com.dianping.cosmos.monitor.topology;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.thrift7.TException;
-import org.apache.thrift7.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.Config;
-import backtype.storm.generated.ClusterSummary;
-import backtype.storm.generated.ExecutorSummary;
-import backtype.storm.generated.Nimbus.Client;
-import backtype.storm.generated.NotAliveException;
-import backtype.storm.generated.SupervisorSummary;
-import backtype.storm.generated.TopologyInfo;
-import backtype.storm.generated.TopologySummary;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-import com.dianping.cosmos.monitor.HttpCatClient;
-import com.dianping.cosmos.util.Constants;
-import com.dianping.cosmos.util.TupleHelpers;
-
-@SuppressWarnings({ "rawtypes", "unchecked"})
-public class ClusterInfoBolt extends BaseRichBolt{
- private static final Logger LOGGER = LoggerFactory.getLogger(ClusterInfoBolt.class);
-
- private static final long serialVersionUID = 1L;
- private transient Client client;
- private transient NimbusClient nimbusClient;
- private OutputCollector collector;
- private Map configMap = null;
-
- @Override
- public void prepare(Map map, TopologyContext topologycontext,
- OutputCollector outputcollector) {
- this.collector = outputcollector;
- this.configMap = map;
- initClient(configMap);
- }
- private void initClient(Map map) {
- nimbusClient = NimbusClient.getConfiguredClient(map);
- client = nimbusClient.getClient();
- }
- @Override
- public void execute(Tuple tuple) {
- if (TupleHelpers.isTickTuple(tuple)) {
- if(nimbusClient == null){
- initClient(configMap);
- }
- getClusterInfo(client);
- collector.emit(new Values(tuple));
- }
- }
-
- private void getClusterInfo(Client client) {
- try {
- ClusterSummary clusterSummary = client.getClusterInfo();
- List<SupervisorSummary> supervisorSummaryList = clusterSummary.get_supervisors();
- int totalWorkers = 0;
- int usedWorkers = 0;
- for(SupervisorSummary summary : supervisorSummaryList){
- totalWorkers += summary.get_num_workers() ;
- usedWorkers += summary.get_num_used_workers();
- }
- int freeWorkers = totalWorkers - usedWorkers;
- LOGGER.info("cluster totalWorkers = " + totalWorkers
- + ", usedWorkers = " + usedWorkers
- + ", freeWorkers = " + freeWorkers);
-
- HttpCatClient.sendMetric("ClusterMonitor", "freeSlots", "avg", String.valueOf(freeWorkers));
- HttpCatClient.sendMetric("ClusterMonitor", "totalSlots", "avg", String.valueOf(totalWorkers));
-
- List<TopologySummary> topologySummaryList = clusterSummary.get_topologies();
- long clusterTPS = 0l;
- for(TopologySummary topology : topologySummaryList){
- long topologyTPS = getTopologyTPS(topology, client);
- clusterTPS += topologyTPS;
- if(topology.get_name().startsWith("ClusterMonitor")){
- continue;
- }
- HttpCatClient.sendMetric(topology.get_name(), topology.get_name() + "-TPS", "avg", String.valueOf(topologyTPS));
- }
- HttpCatClient.sendMetric("ClusterMonitor", "ClusterEmitTPS", "avg", String.valueOf(clusterTPS));
-
- } catch (TException e) {
- initClient(configMap);
- LOGGER.error("get client info error.", e);
- }
- catch(NotAliveException nae){
- LOGGER.warn("topology is dead.", nae);
- }
- }
-
- protected long getTopologyTPS(TopologySummary topology, Client client) throws NotAliveException, TException{
- long topologyTps = 0l;
- String topologyId = topology.get_id();
- if(topologyId.startsWith("ClusterMonitor")){
- return topologyTps;
- }
- TopologyInfo topologyInfo = client.getTopologyInfo(topologyId);
- if(topologyInfo == null){
- return topologyTps;
- }
- List<ExecutorSummary> executorSummaryList = topologyInfo.get_executors();
- for(ExecutorSummary executor : executorSummaryList){
- topologyTps += getComponentTPS(executor);
- }
- LOGGER.info("topology = " + topology.get_name() + ", tps = " + topologyTps);
- return topologyTps;
- }
-
- private long getComponentTPS(ExecutorSummary executor) {
- long componentTps = 0l;
- if(executor == null){
- return componentTps;
- }
- String componentId = executor.get_component_id();
-
- if(Utils.isSystemId(componentId)){
- return componentTps;
- }
- if(executor.get_stats() == null){
- return componentTps;
- }
-
- Map<String, Map<String, Long>> emittedMap = executor.get_stats().get_emitted();
- Map<String, Long> minutesEmitted = emittedMap.get("600");
- if(minutesEmitted == null){
- return componentTps;
- }
- for(Map.Entry<String, Long> emittedEntry : minutesEmitted.entrySet()){
- if(Utils.isSystemId(emittedEntry.getKey())){
- continue;
- }
- if(executor.get_uptime_secs() >= 600){
- componentTps += emittedEntry.getValue() / 600;
- }
- if(executor.get_uptime_secs() >= 10 && executor.get_uptime_secs() < 600){
- componentTps += emittedEntry.getValue() / executor.get_uptime_secs();
- }
- }
- LOGGER.debug("component = " + componentId + ", tps = " + componentTps);
- return componentTps;
- }
-
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer outputfieldsdeclarer) {
- outputfieldsdeclarer.declare(new Fields("monitor"));
- }
-
- @Override
- public Map getComponentConfiguration(){
- Map<String, Object> conf = new HashMap<String, Object>();
- conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, Constants.TPS_COUNTER_FREQUENCY_IN_SECONDS);
- return conf;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoTopology.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoTopology.java
deleted file mode 100755
index 2c9e9c5..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/monitor/topology/ClusterInfoTopology.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package com.dianping.cosmos.monitor.topology;
-
-import backtype.storm.Config;
-import backtype.storm.StormSubmitter;
-import backtype.storm.topology.TopologyBuilder;
-
-public class ClusterInfoTopology {
- public static void main(String[] args) throws Exception {
- TopologyBuilder builder = new TopologyBuilder();
-
- builder.setBolt("ClusterInfo", new ClusterInfoBolt(), 1);
- Config conf = new Config();
- conf.setNumWorkers(1);
-
- StormSubmitter.submitTopology("ClusterMonitor", conf, builder.createTopology());
-
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatClient.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatClient.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatClient.java
deleted file mode 100755
index 65f4a04..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatClient.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package com.dianping.cosmos.util;
-import com.dianping.cat.Cat;
-
-public class CatClient {
-
- private CatClient(){
- }
-
- private static Cat CAT = Cat.getInstance();
-
- static{
- Cat.initialize("cat02.nh","cat03.nh","cat04.nh","cat05.nh");
- }
-
- public static Cat getInstance(){
- return CAT;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatMetricUtil.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatMetricUtil.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatMetricUtil.java
deleted file mode 100755
index 0c4b183..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/CatMetricUtil.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package com.dianping.cosmos.util;
-
-import org.apache.commons.lang.StringUtils;
-
-public class CatMetricUtil {
- private static final String CAT_METRIC_NAME_PREFIX = "Cat#";
-
- /**
- * Returns the metric name for the BlackHole spout.
- * @param topic
- * @param group
- * @return
- */
- public static String getSpoutMetricName(String topic, String group){
- return CAT_METRIC_NAME_PREFIX.concat(topic).concat("[").concat(group).concat("]");
- }
-
- /**
- * Determines whether the data point is a cat metric.
- * @param dataPointName
- * @return
- */
- public static boolean isCatMetric(String dataPointName){
- if(StringUtils.isBlank(dataPointName)){
- return false;
- }
- return StringUtils.startsWith(dataPointName, CAT_METRIC_NAME_PREFIX);
- }
-
-
-
- /**
- * Returns the key written to cat, derived from the metric name.
- * @param spoutMetricName
- * @return
- */
- public static String getCatMetricKey(String spoutMetricName){
- if(StringUtils.isBlank(spoutMetricName)
- || !StringUtils.startsWith(spoutMetricName, CAT_METRIC_NAME_PREFIX)){
- return "default";
- }
- return StringUtils.substringAfter(spoutMetricName, CAT_METRIC_NAME_PREFIX);
-
- }
-}
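A short sketch of how these helpers round-trip a metric name; the topic and consumer group names are hypothetical.

public class CatMetricUtilSketch {
    public static void main(String[] args) {
        // Hypothetical topic and consumer group.
        String name = CatMetricUtil.getSpoutMetricName("order-events", "uv-group");
        System.out.println(name);                                // Cat#order-events[uv-group]
        System.out.println(CatMetricUtil.isCatMetric(name));     // true
        System.out.println(CatMetricUtil.getCatMetricKey(name)); // order-events[uv-group]
    }
}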
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/Constants.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/Constants.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/Constants.java
deleted file mode 100755
index c5edf1a..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/Constants.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package com.dianping.cosmos.util;
-
-public class Constants {
-
- public static final int EMIT_FREQUENCY_IN_SECONDS = 5;
-
- public static final int TPS_COUNTER_FREQUENCY_IN_SECONDS = 30;
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ExecutorInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ExecutorInfo.java b/jstorm-client/src/main/java/backtype/storm/generated/ExecutorInfo.java
deleted file mode 100644
index 2904b4a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ExecutorInfo.java
+++ /dev/null
@@ -1,420 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ExecutorInfo implements org.apache.thrift7.TBase<ExecutorInfo, ExecutorInfo._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ExecutorInfo");
-
- private static final org.apache.thrift7.protocol.TField TASK_START_FIELD_DESC = new org.apache.thrift7.protocol.TField("task_start", org.apache.thrift7.protocol.TType.I32, (short)1);
- private static final org.apache.thrift7.protocol.TField TASK_END_FIELD_DESC = new org.apache.thrift7.protocol.TField("task_end", org.apache.thrift7.protocol.TType.I32, (short)2);
-
- private int task_start; // required
- private int task_end; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- TASK_START((short)1, "task_start"),
- TASK_END((short)2, "task_end");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // TASK_START
- return TASK_START;
- case 2: // TASK_END
- return TASK_END;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __TASK_START_ISSET_ID = 0;
- private static final int __TASK_END_ISSET_ID = 1;
- private BitSet __isset_bit_vector = new BitSet(2);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TASK_START, new org.apache.thrift7.meta_data.FieldMetaData("task_start", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.TASK_END, new org.apache.thrift7.meta_data.FieldMetaData("task_end", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ExecutorInfo.class, metaDataMap);
- }
-
- public ExecutorInfo() {
- }
-
- public ExecutorInfo(
- int task_start,
- int task_end)
- {
- this();
- this.task_start = task_start;
- set_task_start_isSet(true);
- this.task_end = task_end;
- set_task_end_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public ExecutorInfo(ExecutorInfo other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.task_start = other.task_start;
- this.task_end = other.task_end;
- }
-
- public ExecutorInfo deepCopy() {
- return new ExecutorInfo(this);
- }
-
- @Override
- public void clear() {
- set_task_start_isSet(false);
- this.task_start = 0;
- set_task_end_isSet(false);
- this.task_end = 0;
- }
-
- public int get_task_start() {
- return this.task_start;
- }
-
- public void set_task_start(int task_start) {
- this.task_start = task_start;
- set_task_start_isSet(true);
- }
-
- public void unset_task_start() {
- __isset_bit_vector.clear(__TASK_START_ISSET_ID);
- }
-
- /** Returns true if field task_start is set (has been assigned a value) and false otherwise */
- public boolean is_set_task_start() {
- return __isset_bit_vector.get(__TASK_START_ISSET_ID);
- }
-
- public void set_task_start_isSet(boolean value) {
- __isset_bit_vector.set(__TASK_START_ISSET_ID, value);
- }
-
- public int get_task_end() {
- return this.task_end;
- }
-
- public void set_task_end(int task_end) {
- this.task_end = task_end;
- set_task_end_isSet(true);
- }
-
- public void unset_task_end() {
- __isset_bit_vector.clear(__TASK_END_ISSET_ID);
- }
-
- /** Returns true if field task_end is set (has been assigned a value) and false otherwise */
- public boolean is_set_task_end() {
- return __isset_bit_vector.get(__TASK_END_ISSET_ID);
- }
-
- public void set_task_end_isSet(boolean value) {
- __isset_bit_vector.set(__TASK_END_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case TASK_START:
- if (value == null) {
- unset_task_start();
- } else {
- set_task_start((Integer)value);
- }
- break;
-
- case TASK_END:
- if (value == null) {
- unset_task_end();
- } else {
- set_task_end((Integer)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case TASK_START:
- return Integer.valueOf(get_task_start());
-
- case TASK_END:
- return Integer.valueOf(get_task_end());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case TASK_START:
- return is_set_task_start();
- case TASK_END:
- return is_set_task_end();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof ExecutorInfo)
- return this.equals((ExecutorInfo)that);
- return false;
- }
-
- public boolean equals(ExecutorInfo that) {
- if (that == null)
- return false;
-
- boolean this_present_task_start = true;
- boolean that_present_task_start = true;
- if (this_present_task_start || that_present_task_start) {
- if (!(this_present_task_start && that_present_task_start))
- return false;
- if (this.task_start != that.task_start)
- return false;
- }
-
- boolean this_present_task_end = true;
- boolean that_present_task_end = true;
- if (this_present_task_end || that_present_task_end) {
- if (!(this_present_task_end && that_present_task_end))
- return false;
- if (this.task_end != that.task_end)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_task_start = true;
- builder.append(present_task_start);
- if (present_task_start)
- builder.append(task_start);
-
- boolean present_task_end = true;
- builder.append(present_task_end);
- if (present_task_end)
- builder.append(task_end);
-
- return builder.toHashCode();
- }
-
- public int compareTo(ExecutorInfo other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- ExecutorInfo typedOther = (ExecutorInfo)other;
-
- lastComparison = Boolean.valueOf(is_set_task_start()).compareTo(typedOther.is_set_task_start());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_task_start()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.task_start, typedOther.task_start);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_task_end()).compareTo(typedOther.is_set_task_end());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_task_end()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.task_end, typedOther.task_end);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // TASK_START
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.task_start = iprot.readI32();
- set_task_start_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // TASK_END
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.task_end = iprot.readI32();
- set_task_end_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldBegin(TASK_START_FIELD_DESC);
- oprot.writeI32(this.task_start);
- oprot.writeFieldEnd();
- oprot.writeFieldBegin(TASK_END_FIELD_DESC);
- oprot.writeI32(this.task_end);
- oprot.writeFieldEnd();
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("ExecutorInfo(");
- boolean first = true;
-
- sb.append("task_start:");
- sb.append(this.task_start);
- first = false;
- if (!first) sb.append(", ");
- sb.append("task_end:");
- sb.append(this.task_end);
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_task_start()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'task_start' is unset! Struct:" + toString());
- }
-
- if (!is_set_task_end()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'task_end' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ExecutorSpecificStats.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ExecutorSpecificStats.java b/jstorm-client/src/main/java/backtype/storm/generated/ExecutorSpecificStats.java
deleted file mode 100644
index fc2108f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ExecutorSpecificStats.java
+++ /dev/null
@@ -1,318 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ExecutorSpecificStats extends org.apache.thrift7.TUnion<ExecutorSpecificStats, ExecutorSpecificStats._Fields> {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ExecutorSpecificStats");
- private static final org.apache.thrift7.protocol.TField BOLT_FIELD_DESC = new org.apache.thrift7.protocol.TField("bolt", org.apache.thrift7.protocol.TType.STRUCT, (short)1);
- private static final org.apache.thrift7.protocol.TField SPOUT_FIELD_DESC = new org.apache.thrift7.protocol.TField("spout", org.apache.thrift7.protocol.TType.STRUCT, (short)2);
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- BOLT((short)1, "bolt"),
- SPOUT((short)2, "spout");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // BOLT
- return BOLT;
- case 2: // SPOUT
- return SPOUT;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.BOLT, new org.apache.thrift7.meta_data.FieldMetaData("bolt", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, BoltStats.class)));
- tmpMap.put(_Fields.SPOUT, new org.apache.thrift7.meta_data.FieldMetaData("spout", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, SpoutStats.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ExecutorSpecificStats.class, metaDataMap);
- }
-
- public ExecutorSpecificStats() {
- super();
- }
-
- public ExecutorSpecificStats(_Fields setField, Object value) {
- super(setField, value);
- }
-
- public ExecutorSpecificStats(ExecutorSpecificStats other) {
- super(other);
- }
- public ExecutorSpecificStats deepCopy() {
- return new ExecutorSpecificStats(this);
- }
-
- public static ExecutorSpecificStats bolt(BoltStats value) {
- ExecutorSpecificStats x = new ExecutorSpecificStats();
- x.set_bolt(value);
- return x;
- }
-
- public static ExecutorSpecificStats spout(SpoutStats value) {
- ExecutorSpecificStats x = new ExecutorSpecificStats();
- x.set_spout(value);
- return x;
- }
-
-
- @Override
- protected void checkType(_Fields setField, Object value) throws ClassCastException {
- switch (setField) {
- case BOLT:
- if (value instanceof BoltStats) {
- break;
- }
- throw new ClassCastException("Was expecting value of type BoltStats for field 'bolt', but got " + value.getClass().getSimpleName());
- case SPOUT:
- if (value instanceof SpoutStats) {
- break;
- }
- throw new ClassCastException("Was expecting value of type SpoutStats for field 'spout', but got " + value.getClass().getSimpleName());
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected Object readValue(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TField field) throws org.apache.thrift7.TException {
- _Fields setField = _Fields.findByThriftId(field.id);
- if (setField != null) {
- switch (setField) {
- case BOLT:
- if (field.type == BOLT_FIELD_DESC.type) {
- BoltStats bolt;
- bolt = new BoltStats();
- bolt.read(iprot);
- return bolt;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case SPOUT:
- if (field.type == SPOUT_FIELD_DESC.type) {
- SpoutStats spout;
- spout = new SpoutStats();
- spout.read(iprot);
- return spout;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- default:
- throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- }
-
- @Override
- protected void writeValue(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- switch (setField_) {
- case BOLT:
- BoltStats bolt = (BoltStats)value_;
- bolt.write(oprot);
- return;
- case SPOUT:
- SpoutStats spout = (SpoutStats)value_;
- spout.write(oprot);
- return;
- default:
- throw new IllegalStateException("Cannot write union with unknown field " + setField_);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TField getFieldDesc(_Fields setField) {
- switch (setField) {
- case BOLT:
- return BOLT_FIELD_DESC;
- case SPOUT:
- return SPOUT_FIELD_DESC;
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TStruct getStructDesc() {
- return STRUCT_DESC;
- }
-
- @Override
- protected _Fields enumForId(short id) {
- return _Fields.findByThriftIdOrThrow(id);
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
-
- public BoltStats get_bolt() {
- if (getSetField() == _Fields.BOLT) {
- return (BoltStats)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'bolt' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_bolt(BoltStats value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.BOLT;
- value_ = value;
- }
-
- public SpoutStats get_spout() {
- if (getSetField() == _Fields.SPOUT) {
- return (SpoutStats)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'spout' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_spout(SpoutStats value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.SPOUT;
- value_ = value;
- }
-
- public boolean is_set_bolt() {
- return setField_ == _Fields.BOLT;
- }
-
-
- public boolean is_set_spout() {
- return setField_ == _Fields.SPOUT;
- }
-
-
- public boolean equals(Object other) {
- if (other instanceof ExecutorSpecificStats) {
- return equals((ExecutorSpecificStats)other);
- } else {
- return false;
- }
- }
-
- public boolean equals(ExecutorSpecificStats other) {
- return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
- }
-
- @Override
- public int compareTo(ExecutorSpecificStats other) {
- int lastComparison = org.apache.thrift7.TBaseHelper.compareTo(getSetField(), other.getSetField());
- if (lastComparison == 0) {
- return org.apache.thrift7.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
- }
- return lastComparison;
- }
-
-
- @Override
- public int hashCode() {
- HashCodeBuilder hcb = new HashCodeBuilder();
- hcb.append(this.getClass().getName());
- org.apache.thrift7.TFieldIdEnum setField = getSetField();
- if (setField != null) {
- hcb.append(setField.getThriftFieldId());
- Object value = getFieldValue();
- if (value instanceof org.apache.thrift7.TEnum) {
- hcb.append(((org.apache.thrift7.TEnum)getFieldValue()).getValue());
- } else {
- hcb.append(value);
- }
- }
- return hcb.toHashCode();
- }
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/GlobalStreamId.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/GlobalStreamId.java b/jstorm-client/src/main/java/backtype/storm/generated/GlobalStreamId.java
deleted file mode 100644
index f443dcf..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/GlobalStreamId.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class GlobalStreamId implements org.apache.thrift7.TBase<GlobalStreamId, GlobalStreamId._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("GlobalStreamId");
-
- private static final org.apache.thrift7.protocol.TField COMPONENT_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("componentId", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField STREAM_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("streamId", org.apache.thrift7.protocol.TType.STRING, (short)2);
-
- private String componentId; // required
- private String streamId; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- COMPONENT_ID((short)1, "componentId"),
- STREAM_ID((short)2, "streamId");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // COMPONENT_ID
- return COMPONENT_ID;
- case 2: // STREAM_ID
- return STREAM_ID;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.COMPONENT_ID, new org.apache.thrift7.meta_data.FieldMetaData("componentId", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.STREAM_ID, new org.apache.thrift7.meta_data.FieldMetaData("streamId", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(GlobalStreamId.class, metaDataMap);
- }
-
- public GlobalStreamId() {
- }
-
- public GlobalStreamId(
- String componentId,
- String streamId)
- {
- this();
- this.componentId = componentId;
- this.streamId = streamId;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public GlobalStreamId(GlobalStreamId other) {
- if (other.is_set_componentId()) {
- this.componentId = other.componentId;
- }
- if (other.is_set_streamId()) {
- this.streamId = other.streamId;
- }
- }
-
- public GlobalStreamId deepCopy() {
- return new GlobalStreamId(this);
- }
-
- @Override
- public void clear() {
- this.componentId = null;
- this.streamId = null;
- }
-
- public String get_componentId() {
- return this.componentId;
- }
-
- public void set_componentId(String componentId) {
- this.componentId = componentId;
- }
-
- public void unset_componentId() {
- this.componentId = null;
- }
-
- /** Returns true if field componentId is set (has been assigned a value) and false otherwise */
- public boolean is_set_componentId() {
- return this.componentId != null;
- }
-
- public void set_componentId_isSet(boolean value) {
- if (!value) {
- this.componentId = null;
- }
- }
-
- public String get_streamId() {
- return this.streamId;
- }
-
- public void set_streamId(String streamId) {
- this.streamId = streamId;
- }
-
- public void unset_streamId() {
- this.streamId = null;
- }
-
- /** Returns true if field streamId is set (has been assigned a value) and false otherwise */
- public boolean is_set_streamId() {
- return this.streamId != null;
- }
-
- public void set_streamId_isSet(boolean value) {
- if (!value) {
- this.streamId = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case COMPONENT_ID:
- if (value == null) {
- unset_componentId();
- } else {
- set_componentId((String)value);
- }
- break;
-
- case STREAM_ID:
- if (value == null) {
- unset_streamId();
- } else {
- set_streamId((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case COMPONENT_ID:
- return get_componentId();
-
- case STREAM_ID:
- return get_streamId();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case COMPONENT_ID:
- return is_set_componentId();
- case STREAM_ID:
- return is_set_streamId();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof GlobalStreamId)
- return this.equals((GlobalStreamId)that);
- return false;
- }
-
- public boolean equals(GlobalStreamId that) {
- if (that == null)
- return false;
-
- boolean this_present_componentId = true && this.is_set_componentId();
- boolean that_present_componentId = true && that.is_set_componentId();
- if (this_present_componentId || that_present_componentId) {
- if (!(this_present_componentId && that_present_componentId))
- return false;
- if (!this.componentId.equals(that.componentId))
- return false;
- }
-
- boolean this_present_streamId = true && this.is_set_streamId();
- boolean that_present_streamId = true && that.is_set_streamId();
- if (this_present_streamId || that_present_streamId) {
- if (!(this_present_streamId && that_present_streamId))
- return false;
- if (!this.streamId.equals(that.streamId))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_componentId = true && (is_set_componentId());
- builder.append(present_componentId);
- if (present_componentId)
- builder.append(componentId);
-
- boolean present_streamId = true && (is_set_streamId());
- builder.append(present_streamId);
- if (present_streamId)
- builder.append(streamId);
-
- return builder.toHashCode();
- }
-
- public int compareTo(GlobalStreamId other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- GlobalStreamId typedOther = (GlobalStreamId)other;
-
- lastComparison = Boolean.valueOf(is_set_componentId()).compareTo(typedOther.is_set_componentId());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_componentId()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.componentId, typedOther.componentId);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_streamId()).compareTo(typedOther.is_set_streamId());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_streamId()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.streamId, typedOther.streamId);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // COMPONENT_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.componentId = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // STREAM_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.streamId = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.componentId != null) {
- oprot.writeFieldBegin(COMPONENT_ID_FIELD_DESC);
- oprot.writeString(this.componentId);
- oprot.writeFieldEnd();
- }
- if (this.streamId != null) {
- oprot.writeFieldBegin(STREAM_ID_FIELD_DESC);
- oprot.writeString(this.streamId);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("GlobalStreamId(");
- boolean first = true;
-
- sb.append("componentId:");
- if (this.componentId == null) {
- sb.append("null");
- } else {
- sb.append(this.componentId);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("streamId:");
- if (this.streamId == null) {
- sb.append("null");
- } else {
- sb.append(this.streamId);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_componentId()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'componentId' is unset! Struct:" + toString());
- }
-
- if (!is_set_streamId()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'streamId' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
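For context, the GlobalStreamId removed above is a plain two-field Thrift 0.7 struct whose componentId and streamId are both marked REQUIRED. A minimal usage sketch, assuming the generated class is on the classpath; the "spout-1" and "default" values below are illustrative only:

    import backtype.storm.generated.GlobalStreamId;

    public class GlobalStreamIdSketch {
        public static void main(String[] args) throws Exception {
            // Both fields are REQUIRED in the metadata map, so validate() throws
            // a TProtocolException if either one is left unset.
            GlobalStreamId id = new GlobalStreamId("spout-1", "default");
            id.validate();

            // Value semantics: equals/hashCode/compareTo are generated from the
            // two fields, so structurally identical ids compare equal.
            GlobalStreamId same = new GlobalStreamId("spout-1", "default");
            System.out.println(id.equals(same));     // true
            System.out.println(id.compareTo(same));  // 0
        }
    }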
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/Grouping.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/Grouping.java b/jstorm-client/src/main/java/backtype/storm/generated/Grouping.java
deleted file mode 100644
index 45a477c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/Grouping.java
+++ /dev/null
@@ -1,714 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Grouping extends org.apache.thrift7.TUnion<Grouping, Grouping._Fields> {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("Grouping");
- private static final org.apache.thrift7.protocol.TField FIELDS_FIELD_DESC = new org.apache.thrift7.protocol.TField("fields", org.apache.thrift7.protocol.TType.LIST, (short)1);
- private static final org.apache.thrift7.protocol.TField SHUFFLE_FIELD_DESC = new org.apache.thrift7.protocol.TField("shuffle", org.apache.thrift7.protocol.TType.STRUCT, (short)2);
- private static final org.apache.thrift7.protocol.TField ALL_FIELD_DESC = new org.apache.thrift7.protocol.TField("all", org.apache.thrift7.protocol.TType.STRUCT, (short)3);
- private static final org.apache.thrift7.protocol.TField NONE_FIELD_DESC = new org.apache.thrift7.protocol.TField("none", org.apache.thrift7.protocol.TType.STRUCT, (short)4);
- private static final org.apache.thrift7.protocol.TField DIRECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("direct", org.apache.thrift7.protocol.TType.STRUCT, (short)5);
- private static final org.apache.thrift7.protocol.TField CUSTOM_OBJECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("custom_object", org.apache.thrift7.protocol.TType.STRUCT, (short)6);
- private static final org.apache.thrift7.protocol.TField CUSTOM_SERIALIZED_FIELD_DESC = new org.apache.thrift7.protocol.TField("custom_serialized", org.apache.thrift7.protocol.TType.STRING, (short)7);
- private static final org.apache.thrift7.protocol.TField LOCAL_OR_SHUFFLE_FIELD_DESC = new org.apache.thrift7.protocol.TField("local_or_shuffle", org.apache.thrift7.protocol.TType.STRUCT, (short)8);
- private static final org.apache.thrift7.protocol.TField LOCAL_FIRST_FIELD_DESC = new org.apache.thrift7.protocol.TField("localFirst", org.apache.thrift7.protocol.TType.STRUCT, (short)9);
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- FIELDS((short)1, "fields"),
- SHUFFLE((short)2, "shuffle"),
- ALL((short)3, "all"),
- NONE((short)4, "none"),
- DIRECT((short)5, "direct"),
- CUSTOM_OBJECT((short)6, "custom_object"),
- CUSTOM_SERIALIZED((short)7, "custom_serialized"),
- LOCAL_OR_SHUFFLE((short)8, "local_or_shuffle"),
- LOCAL_FIRST((short)9, "localFirst");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // FIELDS
- return FIELDS;
- case 2: // SHUFFLE
- return SHUFFLE;
- case 3: // ALL
- return ALL;
- case 4: // NONE
- return NONE;
- case 5: // DIRECT
- return DIRECT;
- case 6: // CUSTOM_OBJECT
- return CUSTOM_OBJECT;
- case 7: // CUSTOM_SERIALIZED
- return CUSTOM_SERIALIZED;
- case 8: // LOCAL_OR_SHUFFLE
- return LOCAL_OR_SHUFFLE;
- case 9: // LOCAL_FIRST
- return LOCAL_FIRST;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.FIELDS, new org.apache.thrift7.meta_data.FieldMetaData("fields", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING))));
- tmpMap.put(_Fields.SHUFFLE, new org.apache.thrift7.meta_data.FieldMetaData("shuffle", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, NullStruct.class)));
- tmpMap.put(_Fields.ALL, new org.apache.thrift7.meta_data.FieldMetaData("all", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, NullStruct.class)));
- tmpMap.put(_Fields.NONE, new org.apache.thrift7.meta_data.FieldMetaData("none", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, NullStruct.class)));
- tmpMap.put(_Fields.DIRECT, new org.apache.thrift7.meta_data.FieldMetaData("direct", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, NullStruct.class)));
- tmpMap.put(_Fields.CUSTOM_OBJECT, new org.apache.thrift7.meta_data.FieldMetaData("custom_object", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, JavaObject.class)));
- tmpMap.put(_Fields.CUSTOM_SERIALIZED, new org.apache.thrift7.meta_data.FieldMetaData("custom_serialized", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING , true)));
- tmpMap.put(_Fields.LOCAL_OR_SHUFFLE, new org.apache.thrift7.meta_data.FieldMetaData("local_or_shuffle", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, NullStruct.class)));
- tmpMap.put(_Fields.LOCAL_FIRST, new org.apache.thrift7.meta_data.FieldMetaData("localFirst", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, NullStruct.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(Grouping.class, metaDataMap);
- }
-
- public Grouping() {
- super();
- }
-
- public Grouping(_Fields setField, Object value) {
- super(setField, value);
- }
-
- public Grouping(Grouping other) {
- super(other);
- }
- public Grouping deepCopy() {
- return new Grouping(this);
- }
-
- public static Grouping fields(List<String> value) {
- Grouping x = new Grouping();
- x.set_fields(value);
- return x;
- }
-
- public static Grouping shuffle(NullStruct value) {
- Grouping x = new Grouping();
- x.set_shuffle(value);
- return x;
- }
-
- public static Grouping all(NullStruct value) {
- Grouping x = new Grouping();
- x.set_all(value);
- return x;
- }
-
- public static Grouping none(NullStruct value) {
- Grouping x = new Grouping();
- x.set_none(value);
- return x;
- }
-
- public static Grouping direct(NullStruct value) {
- Grouping x = new Grouping();
- x.set_direct(value);
- return x;
- }
-
- public static Grouping custom_object(JavaObject value) {
- Grouping x = new Grouping();
- x.set_custom_object(value);
- return x;
- }
-
- public static Grouping custom_serialized(ByteBuffer value) {
- Grouping x = new Grouping();
- x.set_custom_serialized(value);
- return x;
- }
-
- public static Grouping custom_serialized(byte[] value) {
- Grouping x = new Grouping();
- x.set_custom_serialized(ByteBuffer.wrap(value));
- return x;
- }
-
- public static Grouping local_or_shuffle(NullStruct value) {
- Grouping x = new Grouping();
- x.set_local_or_shuffle(value);
- return x;
- }
-
- public static Grouping localFirst(NullStruct value) {
- Grouping x = new Grouping();
- x.set_localFirst(value);
- return x;
- }
-
-
- @Override
- protected void checkType(_Fields setField, Object value) throws ClassCastException {
- switch (setField) {
- case FIELDS:
- if (value instanceof List) {
- break;
- }
- throw new ClassCastException("Was expecting value of type List<String> for field 'fields', but got " + value.getClass().getSimpleName());
- case SHUFFLE:
- if (value instanceof NullStruct) {
- break;
- }
- throw new ClassCastException("Was expecting value of type NullStruct for field 'shuffle', but got " + value.getClass().getSimpleName());
- case ALL:
- if (value instanceof NullStruct) {
- break;
- }
- throw new ClassCastException("Was expecting value of type NullStruct for field 'all', but got " + value.getClass().getSimpleName());
- case NONE:
- if (value instanceof NullStruct) {
- break;
- }
- throw new ClassCastException("Was expecting value of type NullStruct for field 'none', but got " + value.getClass().getSimpleName());
- case DIRECT:
- if (value instanceof NullStruct) {
- break;
- }
- throw new ClassCastException("Was expecting value of type NullStruct for field 'direct', but got " + value.getClass().getSimpleName());
- case CUSTOM_OBJECT:
- if (value instanceof JavaObject) {
- break;
- }
- throw new ClassCastException("Was expecting value of type JavaObject for field 'custom_object', but got " + value.getClass().getSimpleName());
- case CUSTOM_SERIALIZED:
- if (value instanceof ByteBuffer) {
- break;
- }
- throw new ClassCastException("Was expecting value of type ByteBuffer for field 'custom_serialized', but got " + value.getClass().getSimpleName());
- case LOCAL_OR_SHUFFLE:
- if (value instanceof NullStruct) {
- break;
- }
- throw new ClassCastException("Was expecting value of type NullStruct for field 'local_or_shuffle', but got " + value.getClass().getSimpleName());
- case LOCAL_FIRST:
- if (value instanceof NullStruct) {
- break;
- }
- throw new ClassCastException("Was expecting value of type NullStruct for field 'localFirst', but got " + value.getClass().getSimpleName());
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected Object readValue(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TField field) throws org.apache.thrift7.TException {
- _Fields setField = _Fields.findByThriftId(field.id);
- if (setField != null) {
- switch (setField) {
- case FIELDS:
- if (field.type == FIELDS_FIELD_DESC.type) {
- List<String> fields;
- {
- org.apache.thrift7.protocol.TList _list4 = iprot.readListBegin();
- fields = new ArrayList<String>(_list4.size);
- for (int _i5 = 0; _i5 < _list4.size; ++_i5)
- {
- String _elem6; // required
- _elem6 = iprot.readString();
- fields.add(_elem6);
- }
- iprot.readListEnd();
- }
- return fields;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case SHUFFLE:
- if (field.type == SHUFFLE_FIELD_DESC.type) {
- NullStruct shuffle;
- shuffle = new NullStruct();
- shuffle.read(iprot);
- return shuffle;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case ALL:
- if (field.type == ALL_FIELD_DESC.type) {
- NullStruct all;
- all = new NullStruct();
- all.read(iprot);
- return all;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case NONE:
- if (field.type == NONE_FIELD_DESC.type) {
- NullStruct none;
- none = new NullStruct();
- none.read(iprot);
- return none;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case DIRECT:
- if (field.type == DIRECT_FIELD_DESC.type) {
- NullStruct direct;
- direct = new NullStruct();
- direct.read(iprot);
- return direct;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case CUSTOM_OBJECT:
- if (field.type == CUSTOM_OBJECT_FIELD_DESC.type) {
- JavaObject custom_object;
- custom_object = new JavaObject();
- custom_object.read(iprot);
- return custom_object;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case CUSTOM_SERIALIZED:
- if (field.type == CUSTOM_SERIALIZED_FIELD_DESC.type) {
- ByteBuffer custom_serialized;
- custom_serialized = iprot.readBinary();
- return custom_serialized;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case LOCAL_OR_SHUFFLE:
- if (field.type == LOCAL_OR_SHUFFLE_FIELD_DESC.type) {
- NullStruct local_or_shuffle;
- local_or_shuffle = new NullStruct();
- local_or_shuffle.read(iprot);
- return local_or_shuffle;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case LOCAL_FIRST:
- if (field.type == LOCAL_FIRST_FIELD_DESC.type) {
- NullStruct localFirst;
- localFirst = new NullStruct();
- localFirst.read(iprot);
- return localFirst;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- default:
- throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- }
-
- @Override
- protected void writeValue(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- switch (setField_) {
- case FIELDS:
- List<String> fields = (List<String>)value_;
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRING, fields.size()));
- for (String _iter7 : fields)
- {
- oprot.writeString(_iter7);
- }
- oprot.writeListEnd();
- }
- return;
- case SHUFFLE:
- NullStruct shuffle = (NullStruct)value_;
- shuffle.write(oprot);
- return;
- case ALL:
- NullStruct all = (NullStruct)value_;
- all.write(oprot);
- return;
- case NONE:
- NullStruct none = (NullStruct)value_;
- none.write(oprot);
- return;
- case DIRECT:
- NullStruct direct = (NullStruct)value_;
- direct.write(oprot);
- return;
- case CUSTOM_OBJECT:
- JavaObject custom_object = (JavaObject)value_;
- custom_object.write(oprot);
- return;
- case CUSTOM_SERIALIZED:
- ByteBuffer custom_serialized = (ByteBuffer)value_;
- oprot.writeBinary(custom_serialized);
- return;
- case LOCAL_OR_SHUFFLE:
- NullStruct local_or_shuffle = (NullStruct)value_;
- local_or_shuffle.write(oprot);
- return;
- case LOCAL_FIRST:
- NullStruct localFirst = (NullStruct)value_;
- localFirst.write(oprot);
- return;
- default:
- throw new IllegalStateException("Cannot write union with unknown field " + setField_);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TField getFieldDesc(_Fields setField) {
- switch (setField) {
- case FIELDS:
- return FIELDS_FIELD_DESC;
- case SHUFFLE:
- return SHUFFLE_FIELD_DESC;
- case ALL:
- return ALL_FIELD_DESC;
- case NONE:
- return NONE_FIELD_DESC;
- case DIRECT:
- return DIRECT_FIELD_DESC;
- case CUSTOM_OBJECT:
- return CUSTOM_OBJECT_FIELD_DESC;
- case CUSTOM_SERIALIZED:
- return CUSTOM_SERIALIZED_FIELD_DESC;
- case LOCAL_OR_SHUFFLE:
- return LOCAL_OR_SHUFFLE_FIELD_DESC;
- case LOCAL_FIRST:
- return LOCAL_FIRST_FIELD_DESC;
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TStruct getStructDesc() {
- return STRUCT_DESC;
- }
-
- @Override
- protected _Fields enumForId(short id) {
- return _Fields.findByThriftIdOrThrow(id);
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
-
- public List<String> get_fields() {
- if (getSetField() == _Fields.FIELDS) {
- return (List<String>)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'fields' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_fields(List<String> value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.FIELDS;
- value_ = value;
- }
-
- public NullStruct get_shuffle() {
- if (getSetField() == _Fields.SHUFFLE) {
- return (NullStruct)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'shuffle' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_shuffle(NullStruct value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.SHUFFLE;
- value_ = value;
- }
-
- public NullStruct get_all() {
- if (getSetField() == _Fields.ALL) {
- return (NullStruct)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'all' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_all(NullStruct value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.ALL;
- value_ = value;
- }
-
- public NullStruct get_none() {
- if (getSetField() == _Fields.NONE) {
- return (NullStruct)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'none' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_none(NullStruct value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.NONE;
- value_ = value;
- }
-
- public NullStruct get_direct() {
- if (getSetField() == _Fields.DIRECT) {
- return (NullStruct)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'direct' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_direct(NullStruct value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.DIRECT;
- value_ = value;
- }
-
- public JavaObject get_custom_object() {
- if (getSetField() == _Fields.CUSTOM_OBJECT) {
- return (JavaObject)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'custom_object' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_custom_object(JavaObject value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.CUSTOM_OBJECT;
- value_ = value;
- }
-
- public byte[] get_custom_serialized() {
- set_custom_serialized(org.apache.thrift7.TBaseHelper.rightSize(buffer_for_custom_serialized()));
- ByteBuffer b = buffer_for_custom_serialized();
- return b == null ? null : b.array();
- }
-
- public ByteBuffer buffer_for_custom_serialized() {
- if (getSetField() == _Fields.CUSTOM_SERIALIZED) {
- return (ByteBuffer)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'custom_serialized' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_custom_serialized(byte[] value) {
- set_custom_serialized(ByteBuffer.wrap(value));
- }
-
- public void set_custom_serialized(ByteBuffer value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.CUSTOM_SERIALIZED;
- value_ = value;
- }
-
- public NullStruct get_local_or_shuffle() {
- if (getSetField() == _Fields.LOCAL_OR_SHUFFLE) {
- return (NullStruct)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'local_or_shuffle' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_local_or_shuffle(NullStruct value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.LOCAL_OR_SHUFFLE;
- value_ = value;
- }
-
- public NullStruct get_localFirst() {
- if (getSetField() == _Fields.LOCAL_FIRST) {
- return (NullStruct)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'localFirst' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_localFirst(NullStruct value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.LOCAL_FIRST;
- value_ = value;
- }
-
- public boolean is_set_fields() {
- return setField_ == _Fields.FIELDS;
- }
-
-
- public boolean is_set_shuffle() {
- return setField_ == _Fields.SHUFFLE;
- }
-
-
- public boolean is_set_all() {
- return setField_ == _Fields.ALL;
- }
-
-
- public boolean is_set_none() {
- return setField_ == _Fields.NONE;
- }
-
-
- public boolean is_set_direct() {
- return setField_ == _Fields.DIRECT;
- }
-
-
- public boolean is_set_custom_object() {
- return setField_ == _Fields.CUSTOM_OBJECT;
- }
-
-
- public boolean is_set_custom_serialized() {
- return setField_ == _Fields.CUSTOM_SERIALIZED;
- }
-
-
- public boolean is_set_local_or_shuffle() {
- return setField_ == _Fields.LOCAL_OR_SHUFFLE;
- }
-
-
- public boolean is_set_localFirst() {
- return setField_ == _Fields.LOCAL_FIRST;
- }
-
-
- public boolean equals(Object other) {
- if (other instanceof Grouping) {
- return equals((Grouping)other);
- } else {
- return false;
- }
- }
-
- public boolean equals(Grouping other) {
- return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
- }
-
- @Override
- public int compareTo(Grouping other) {
- int lastComparison = org.apache.thrift7.TBaseHelper.compareTo(getSetField(), other.getSetField());
- if (lastComparison == 0) {
- return org.apache.thrift7.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
- }
- return lastComparison;
- }
-
-
- @Override
- public int hashCode() {
- HashCodeBuilder hcb = new HashCodeBuilder();
- hcb.append(this.getClass().getName());
- org.apache.thrift7.TFieldIdEnum setField = getSetField();
- if (setField != null) {
- hcb.append(setField.getThriftFieldId());
- Object value = getFieldValue();
- if (value instanceof org.apache.thrift7.TEnum) {
- hcb.append(((org.apache.thrift7.TEnum)getFieldValue()).getValue());
- } else {
- hcb.append(value);
- }
- }
- return hcb.toHashCode();
- }
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
-}
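The Grouping class removed above is a Thrift TUnion: exactly one of its nine members is set at a time, and the generated static factory methods are the usual way to build one. A minimal sketch under that assumption; NullStruct is the empty payload struct referenced by the union's metadata:

    import java.util.Arrays;
    import backtype.storm.generated.Grouping;
    import backtype.storm.generated.NullStruct;

    public class GroupingSketch {
        public static void main(String[] args) {
            // Fields grouping: route tuples by the named output fields.
            Grouping byWord = Grouping.fields(Arrays.asList("word"));

            // Shuffle grouping: the member carries no data, hence NullStruct.
            Grouping shuffle = Grouping.shuffle(new NullStruct());

            // A union remembers which member is set; reading any other member
            // throws a RuntimeException from the generated getter.
            System.out.println(byWord.getSetField());      // FIELDS
            System.out.println(byWord.get_fields());       // [word]
            System.out.println(shuffle.is_set_shuffle());  // true
        }
    }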
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/InvalidTopologyException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/InvalidTopologyException.java b/jstorm-client/src/main/java/backtype/storm/generated/InvalidTopologyException.java
deleted file mode 100644
index a52fbfe..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/InvalidTopologyException.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class InvalidTopologyException extends Exception implements org.apache.thrift7.TBase<InvalidTopologyException, InvalidTopologyException._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("InvalidTopologyException");
-
- private static final org.apache.thrift7.protocol.TField MSG_FIELD_DESC = new org.apache.thrift7.protocol.TField("msg", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String msg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- MSG((short)1, "msg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // MSG
- return MSG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.MSG, new org.apache.thrift7.meta_data.FieldMetaData("msg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(InvalidTopologyException.class, metaDataMap);
- }
-
- public InvalidTopologyException() {
- }
-
- public InvalidTopologyException(
- String msg)
- {
- this();
- this.msg = msg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public InvalidTopologyException(InvalidTopologyException other) {
- if (other.is_set_msg()) {
- this.msg = other.msg;
- }
- }
-
- public InvalidTopologyException deepCopy() {
- return new InvalidTopologyException(this);
- }
-
- @Override
- public void clear() {
- this.msg = null;
- }
-
- public String get_msg() {
- return this.msg;
- }
-
- public void set_msg(String msg) {
- this.msg = msg;
- }
-
- public void unset_msg() {
- this.msg = null;
- }
-
- /** Returns true if field msg is set (has been assigned a value) and false otherwise */
- public boolean is_set_msg() {
- return this.msg != null;
- }
-
- public void set_msg_isSet(boolean value) {
- if (!value) {
- this.msg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case MSG:
- if (value == null) {
- unset_msg();
- } else {
- set_msg((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case MSG:
- return get_msg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case MSG:
- return is_set_msg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof InvalidTopologyException)
- return this.equals((InvalidTopologyException)that);
- return false;
- }
-
- public boolean equals(InvalidTopologyException that) {
- if (that == null)
- return false;
-
- boolean this_present_msg = true && this.is_set_msg();
- boolean that_present_msg = true && that.is_set_msg();
- if (this_present_msg || that_present_msg) {
- if (!(this_present_msg && that_present_msg))
- return false;
- if (!this.msg.equals(that.msg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_msg = true && (is_set_msg());
- builder.append(present_msg);
- if (present_msg)
- builder.append(msg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(InvalidTopologyException other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- InvalidTopologyException typedOther = (InvalidTopologyException)other;
-
- lastComparison = Boolean.valueOf(is_set_msg()).compareTo(typedOther.is_set_msg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_msg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.msg, typedOther.msg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // MSG
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.msg = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.msg != null) {
- oprot.writeFieldBegin(MSG_FIELD_DESC);
- oprot.writeString(this.msg);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("InvalidTopologyException(");
- boolean first = true;
-
- sb.append("msg:");
- if (this.msg == null) {
- sb.append("null");
- } else {
- sb.append(this.msg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_msg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
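The InvalidTopologyException removed above is the Thrift exception struct Storm's client/Nimbus interface uses to signal that a submitted topology failed validation; its single msg field is required. A small sketch of constructing and handling it, with an illustrative error message:

    import backtype.storm.generated.InvalidTopologyException;

    public class InvalidTopologySketch {
        public static void main(String[] args) {
            try {
                // Illustrative only; real validation happens on the Nimbus side.
                throw new InvalidTopologyException("component bolt-1 subscribes to a non-existent stream");
            } catch (InvalidTopologyException e) {
                // msg is REQUIRED, so get_msg() is non-null for any instance
                // that passed validate().
                System.err.println("rejected topology: " + e.get_msg());
            }
        }
    }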
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/JavaObject.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/JavaObject.java b/jstorm-client/src/main/java/backtype/storm/generated/JavaObject.java
deleted file mode 100644
index f6fe430..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/JavaObject.java
+++ /dev/null
@@ -1,463 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class JavaObject implements org.apache.thrift7.TBase<JavaObject, JavaObject._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("JavaObject");
-
- private static final org.apache.thrift7.protocol.TField FULL_CLASS_NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("full_class_name", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField ARGS_LIST_FIELD_DESC = new org.apache.thrift7.protocol.TField("args_list", org.apache.thrift7.protocol.TType.LIST, (short)2);
-
- private String full_class_name; // required
- private List<JavaObjectArg> args_list; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- FULL_CLASS_NAME((short)1, "full_class_name"),
- ARGS_LIST((short)2, "args_list");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // FULL_CLASS_NAME
- return FULL_CLASS_NAME;
- case 2: // ARGS_LIST
- return ARGS_LIST;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.FULL_CLASS_NAME, new org.apache.thrift7.meta_data.FieldMetaData("full_class_name", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.ARGS_LIST, new org.apache.thrift7.meta_data.FieldMetaData("args_list", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, JavaObjectArg.class))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(JavaObject.class, metaDataMap);
- }
-
- public JavaObject() {
- }
-
- public JavaObject(
- String full_class_name,
- List<JavaObjectArg> args_list)
- {
- this();
- this.full_class_name = full_class_name;
- this.args_list = args_list;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public JavaObject(JavaObject other) {
- if (other.is_set_full_class_name()) {
- this.full_class_name = other.full_class_name;
- }
- if (other.is_set_args_list()) {
- List<JavaObjectArg> __this__args_list = new ArrayList<JavaObjectArg>();
- for (JavaObjectArg other_element : other.args_list) {
- __this__args_list.add(new JavaObjectArg(other_element));
- }
- this.args_list = __this__args_list;
- }
- }
-
- public JavaObject deepCopy() {
- return new JavaObject(this);
- }
-
- @Override
- public void clear() {
- this.full_class_name = null;
- this.args_list = null;
- }
-
- public String get_full_class_name() {
- return this.full_class_name;
- }
-
- public void set_full_class_name(String full_class_name) {
- this.full_class_name = full_class_name;
- }
-
- public void unset_full_class_name() {
- this.full_class_name = null;
- }
-
- /** Returns true if field full_class_name is set (has been assigned a value) and false otherwise */
- public boolean is_set_full_class_name() {
- return this.full_class_name != null;
- }
-
- public void set_full_class_name_isSet(boolean value) {
- if (!value) {
- this.full_class_name = null;
- }
- }
-
- public int get_args_list_size() {
- return (this.args_list == null) ? 0 : this.args_list.size();
- }
-
- public java.util.Iterator<JavaObjectArg> get_args_list_iterator() {
- return (this.args_list == null) ? null : this.args_list.iterator();
- }
-
- public void add_to_args_list(JavaObjectArg elem) {
- if (this.args_list == null) {
- this.args_list = new ArrayList<JavaObjectArg>();
- }
- this.args_list.add(elem);
- }
-
- public List<JavaObjectArg> get_args_list() {
- return this.args_list;
- }
-
- public void set_args_list(List<JavaObjectArg> args_list) {
- this.args_list = args_list;
- }
-
- public void unset_args_list() {
- this.args_list = null;
- }
-
- /** Returns true if field args_list is set (has been assigned a value) and false otherwise */
- public boolean is_set_args_list() {
- return this.args_list != null;
- }
-
- public void set_args_list_isSet(boolean value) {
- if (!value) {
- this.args_list = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case FULL_CLASS_NAME:
- if (value == null) {
- unset_full_class_name();
- } else {
- set_full_class_name((String)value);
- }
- break;
-
- case ARGS_LIST:
- if (value == null) {
- unset_args_list();
- } else {
- set_args_list((List<JavaObjectArg>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case FULL_CLASS_NAME:
- return get_full_class_name();
-
- case ARGS_LIST:
- return get_args_list();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case FULL_CLASS_NAME:
- return is_set_full_class_name();
- case ARGS_LIST:
- return is_set_args_list();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof JavaObject)
- return this.equals((JavaObject)that);
- return false;
- }
-
- public boolean equals(JavaObject that) {
- if (that == null)
- return false;
-
- boolean this_present_full_class_name = true && this.is_set_full_class_name();
- boolean that_present_full_class_name = true && that.is_set_full_class_name();
- if (this_present_full_class_name || that_present_full_class_name) {
- if (!(this_present_full_class_name && that_present_full_class_name))
- return false;
- if (!this.full_class_name.equals(that.full_class_name))
- return false;
- }
-
- boolean this_present_args_list = true && this.is_set_args_list();
- boolean that_present_args_list = true && that.is_set_args_list();
- if (this_present_args_list || that_present_args_list) {
- if (!(this_present_args_list && that_present_args_list))
- return false;
- if (!this.args_list.equals(that.args_list))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_full_class_name = true && (is_set_full_class_name());
- builder.append(present_full_class_name);
- if (present_full_class_name)
- builder.append(full_class_name);
-
- boolean present_args_list = true && (is_set_args_list());
- builder.append(present_args_list);
- if (present_args_list)
- builder.append(args_list);
-
- return builder.toHashCode();
- }
-
- public int compareTo(JavaObject other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- JavaObject typedOther = (JavaObject)other;
-
- lastComparison = Boolean.valueOf(is_set_full_class_name()).compareTo(typedOther.is_set_full_class_name());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_full_class_name()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.full_class_name, typedOther.full_class_name);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_args_list()).compareTo(typedOther.is_set_args_list());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_args_list()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.args_list, typedOther.args_list);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // FULL_CLASS_NAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.full_class_name = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // ARGS_LIST
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list0 = iprot.readListBegin();
- this.args_list = new ArrayList<JavaObjectArg>(_list0.size);
- for (int _i1 = 0; _i1 < _list0.size; ++_i1)
- {
- JavaObjectArg _elem2; // required
- _elem2 = new JavaObjectArg();
- _elem2.read(iprot);
- this.args_list.add(_elem2);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.full_class_name != null) {
- oprot.writeFieldBegin(FULL_CLASS_NAME_FIELD_DESC);
- oprot.writeString(this.full_class_name);
- oprot.writeFieldEnd();
- }
- if (this.args_list != null) {
- oprot.writeFieldBegin(ARGS_LIST_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.args_list.size()));
- for (JavaObjectArg _iter3 : this.args_list)
- {
- _iter3.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("JavaObject(");
- boolean first = true;
-
- sb.append("full_class_name:");
- if (this.full_class_name == null) {
- sb.append("null");
- } else {
- sb.append(this.full_class_name);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("args_list:");
- if (this.args_list == null) {
- sb.append("null");
- } else {
- sb.append(this.args_list);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_full_class_name()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'full_class_name' is unset! Struct:" + toString());
- }
-
- if (!is_set_args_list()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'args_list' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/JavaObjectArg.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/JavaObjectArg.java b/jstorm-client/src/main/java/backtype/storm/generated/JavaObjectArg.java
deleted file mode 100644
index c9f6381..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/JavaObjectArg.java
+++ /dev/null
@@ -1,532 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class JavaObjectArg extends org.apache.thrift7.TUnion<JavaObjectArg, JavaObjectArg._Fields> {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("JavaObjectArg");
- private static final org.apache.thrift7.protocol.TField INT_ARG_FIELD_DESC = new org.apache.thrift7.protocol.TField("int_arg", org.apache.thrift7.protocol.TType.I32, (short)1);
- private static final org.apache.thrift7.protocol.TField LONG_ARG_FIELD_DESC = new org.apache.thrift7.protocol.TField("long_arg", org.apache.thrift7.protocol.TType.I64, (short)2);
- private static final org.apache.thrift7.protocol.TField STRING_ARG_FIELD_DESC = new org.apache.thrift7.protocol.TField("string_arg", org.apache.thrift7.protocol.TType.STRING, (short)3);
- private static final org.apache.thrift7.protocol.TField BOOL_ARG_FIELD_DESC = new org.apache.thrift7.protocol.TField("bool_arg", org.apache.thrift7.protocol.TType.BOOL, (short)4);
- private static final org.apache.thrift7.protocol.TField BINARY_ARG_FIELD_DESC = new org.apache.thrift7.protocol.TField("binary_arg", org.apache.thrift7.protocol.TType.STRING, (short)5);
- private static final org.apache.thrift7.protocol.TField DOUBLE_ARG_FIELD_DESC = new org.apache.thrift7.protocol.TField("double_arg", org.apache.thrift7.protocol.TType.DOUBLE, (short)6);
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- INT_ARG((short)1, "int_arg"),
- LONG_ARG((short)2, "long_arg"),
- STRING_ARG((short)3, "string_arg"),
- BOOL_ARG((short)4, "bool_arg"),
- BINARY_ARG((short)5, "binary_arg"),
- DOUBLE_ARG((short)6, "double_arg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // INT_ARG
- return INT_ARG;
- case 2: // LONG_ARG
- return LONG_ARG;
- case 3: // STRING_ARG
- return STRING_ARG;
- case 4: // BOOL_ARG
- return BOOL_ARG;
- case 5: // BINARY_ARG
- return BINARY_ARG;
- case 6: // DOUBLE_ARG
- return DOUBLE_ARG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.INT_ARG, new org.apache.thrift7.meta_data.FieldMetaData("int_arg", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.LONG_ARG, new org.apache.thrift7.meta_data.FieldMetaData("long_arg", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)));
- tmpMap.put(_Fields.STRING_ARG, new org.apache.thrift7.meta_data.FieldMetaData("string_arg", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.BOOL_ARG, new org.apache.thrift7.meta_data.FieldMetaData("bool_arg", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.BOOL)));
- tmpMap.put(_Fields.BINARY_ARG, new org.apache.thrift7.meta_data.FieldMetaData("binary_arg", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING , true)));
- tmpMap.put(_Fields.DOUBLE_ARG, new org.apache.thrift7.meta_data.FieldMetaData("double_arg", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(JavaObjectArg.class, metaDataMap);
- }
-
- public JavaObjectArg() {
- super();
- }
-
- public JavaObjectArg(_Fields setField, Object value) {
- super(setField, value);
- }
-
- public JavaObjectArg(JavaObjectArg other) {
- super(other);
- }
- public JavaObjectArg deepCopy() {
- return new JavaObjectArg(this);
- }
-
- public static JavaObjectArg int_arg(int value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_int_arg(value);
- return x;
- }
-
- public static JavaObjectArg long_arg(long value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_long_arg(value);
- return x;
- }
-
- public static JavaObjectArg string_arg(String value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_string_arg(value);
- return x;
- }
-
- public static JavaObjectArg bool_arg(boolean value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_bool_arg(value);
- return x;
- }
-
- public static JavaObjectArg binary_arg(ByteBuffer value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_binary_arg(value);
- return x;
- }
-
- public static JavaObjectArg binary_arg(byte[] value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_binary_arg(ByteBuffer.wrap(value));
- return x;
- }
-
- public static JavaObjectArg double_arg(double value) {
- JavaObjectArg x = new JavaObjectArg();
- x.set_double_arg(value);
- return x;
- }
-
-
- @Override
- protected void checkType(_Fields setField, Object value) throws ClassCastException {
- switch (setField) {
- case INT_ARG:
- if (value instanceof Integer) {
- break;
- }
- throw new ClassCastException("Was expecting value of type Integer for field 'int_arg', but got " + value.getClass().getSimpleName());
- case LONG_ARG:
- if (value instanceof Long) {
- break;
- }
- throw new ClassCastException("Was expecting value of type Long for field 'long_arg', but got " + value.getClass().getSimpleName());
- case STRING_ARG:
- if (value instanceof String) {
- break;
- }
- throw new ClassCastException("Was expecting value of type String for field 'string_arg', but got " + value.getClass().getSimpleName());
- case BOOL_ARG:
- if (value instanceof Boolean) {
- break;
- }
- throw new ClassCastException("Was expecting value of type Boolean for field 'bool_arg', but got " + value.getClass().getSimpleName());
- case BINARY_ARG:
- if (value instanceof ByteBuffer) {
- break;
- }
- throw new ClassCastException("Was expecting value of type ByteBuffer for field 'binary_arg', but got " + value.getClass().getSimpleName());
- case DOUBLE_ARG:
- if (value instanceof Double) {
- break;
- }
- throw new ClassCastException("Was expecting value of type Double for field 'double_arg', but got " + value.getClass().getSimpleName());
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected Object readValue(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TField field) throws org.apache.thrift7.TException {
- _Fields setField = _Fields.findByThriftId(field.id);
- if (setField != null) {
- switch (setField) {
- case INT_ARG:
- if (field.type == INT_ARG_FIELD_DESC.type) {
- Integer int_arg;
- int_arg = iprot.readI32();
- return int_arg;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case LONG_ARG:
- if (field.type == LONG_ARG_FIELD_DESC.type) {
- Long long_arg;
- long_arg = iprot.readI64();
- return long_arg;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case STRING_ARG:
- if (field.type == STRING_ARG_FIELD_DESC.type) {
- String string_arg;
- string_arg = iprot.readString();
- return string_arg;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case BOOL_ARG:
- if (field.type == BOOL_ARG_FIELD_DESC.type) {
- Boolean bool_arg;
- bool_arg = iprot.readBool();
- return bool_arg;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case BINARY_ARG:
- if (field.type == BINARY_ARG_FIELD_DESC.type) {
- ByteBuffer binary_arg;
- binary_arg = iprot.readBinary();
- return binary_arg;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- case DOUBLE_ARG:
- if (field.type == DOUBLE_ARG_FIELD_DESC.type) {
- Double double_arg;
- double_arg = iprot.readDouble();
- return double_arg;
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- default:
- throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- return null;
- }
- }
-
- @Override
- protected void writeValue(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- switch (setField_) {
- case INT_ARG:
- Integer int_arg = (Integer)value_;
- oprot.writeI32(int_arg);
- return;
- case LONG_ARG:
- Long long_arg = (Long)value_;
- oprot.writeI64(long_arg);
- return;
- case STRING_ARG:
- String string_arg = (String)value_;
- oprot.writeString(string_arg);
- return;
- case BOOL_ARG:
- Boolean bool_arg = (Boolean)value_;
- oprot.writeBool(bool_arg);
- return;
- case BINARY_ARG:
- ByteBuffer binary_arg = (ByteBuffer)value_;
- oprot.writeBinary(binary_arg);
- return;
- case DOUBLE_ARG:
- Double double_arg = (Double)value_;
- oprot.writeDouble(double_arg);
- return;
- default:
- throw new IllegalStateException("Cannot write union with unknown field " + setField_);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TField getFieldDesc(_Fields setField) {
- switch (setField) {
- case INT_ARG:
- return INT_ARG_FIELD_DESC;
- case LONG_ARG:
- return LONG_ARG_FIELD_DESC;
- case STRING_ARG:
- return STRING_ARG_FIELD_DESC;
- case BOOL_ARG:
- return BOOL_ARG_FIELD_DESC;
- case BINARY_ARG:
- return BINARY_ARG_FIELD_DESC;
- case DOUBLE_ARG:
- return DOUBLE_ARG_FIELD_DESC;
- default:
- throw new IllegalArgumentException("Unknown field id " + setField);
- }
- }
-
- @Override
- protected org.apache.thrift7.protocol.TStruct getStructDesc() {
- return STRUCT_DESC;
- }
-
- @Override
- protected _Fields enumForId(short id) {
- return _Fields.findByThriftIdOrThrow(id);
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
-
- public int get_int_arg() {
- if (getSetField() == _Fields.INT_ARG) {
- return (Integer)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'int_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_int_arg(int value) {
- setField_ = _Fields.INT_ARG;
- value_ = value;
- }
-
- public long get_long_arg() {
- if (getSetField() == _Fields.LONG_ARG) {
- return (Long)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'long_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_long_arg(long value) {
- setField_ = _Fields.LONG_ARG;
- value_ = value;
- }
-
- public String get_string_arg() {
- if (getSetField() == _Fields.STRING_ARG) {
- return (String)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'string_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_string_arg(String value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.STRING_ARG;
- value_ = value;
- }
-
- public boolean get_bool_arg() {
- if (getSetField() == _Fields.BOOL_ARG) {
- return (Boolean)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'bool_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_bool_arg(boolean value) {
- setField_ = _Fields.BOOL_ARG;
- value_ = value;
- }
-
- public byte[] get_binary_arg() {
- set_binary_arg(org.apache.thrift7.TBaseHelper.rightSize(buffer_for_binary_arg()));
- ByteBuffer b = buffer_for_binary_arg();
- return b == null ? null : b.array();
- }
-
- public ByteBuffer buffer_for_binary_arg() {
- if (getSetField() == _Fields.BINARY_ARG) {
- return (ByteBuffer)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'binary_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_binary_arg(byte[] value) {
- set_binary_arg(ByteBuffer.wrap(value));
- }
-
- public void set_binary_arg(ByteBuffer value) {
- if (value == null) throw new NullPointerException();
- setField_ = _Fields.BINARY_ARG;
- value_ = value;
- }
-
- public double get_double_arg() {
- if (getSetField() == _Fields.DOUBLE_ARG) {
- return (Double)getFieldValue();
- } else {
- throw new RuntimeException("Cannot get field 'double_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
- }
- }
-
- public void set_double_arg(double value) {
- setField_ = _Fields.DOUBLE_ARG;
- value_ = value;
- }
-
- public boolean is_set_int_arg() {
- return setField_ == _Fields.INT_ARG;
- }
-
-
- public boolean is_set_long_arg() {
- return setField_ == _Fields.LONG_ARG;
- }
-
-
- public boolean is_set_string_arg() {
- return setField_ == _Fields.STRING_ARG;
- }
-
-
- public boolean is_set_bool_arg() {
- return setField_ == _Fields.BOOL_ARG;
- }
-
-
- public boolean is_set_binary_arg() {
- return setField_ == _Fields.BINARY_ARG;
- }
-
-
- public boolean is_set_double_arg() {
- return setField_ == _Fields.DOUBLE_ARG;
- }
-
-
- public boolean equals(Object other) {
- if (other instanceof JavaObjectArg) {
- return equals((JavaObjectArg)other);
- } else {
- return false;
- }
- }
-
- public boolean equals(JavaObjectArg other) {
- return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
- }
-
- @Override
- public int compareTo(JavaObjectArg other) {
- int lastComparison = org.apache.thrift7.TBaseHelper.compareTo(getSetField(), other.getSetField());
- if (lastComparison == 0) {
- return org.apache.thrift7.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
- }
- return lastComparison;
- }
-
-
- @Override
- public int hashCode() {
- HashCodeBuilder hcb = new HashCodeBuilder();
- hcb.append(this.getClass().getName());
- org.apache.thrift7.TFieldIdEnum setField = getSetField();
- if (setField != null) {
- hcb.append(setField.getThriftFieldId());
- Object value = getFieldValue();
- if (value instanceof org.apache.thrift7.TEnum) {
- hcb.append(((org.apache.thrift7.TEnum)getFieldValue()).getValue());
- } else {
- hcb.append(value);
- }
- }
- return hcb.toHashCode();
- }
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-
-}
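
Note: JavaObjectArg is the Thrift union carried in a JavaObject's args_list (see the list-reading code earlier in this commit). For orientation, a minimal sketch using only the factory methods and getters visible in the deleted file above; the sketch class itself is illustrative and not part of this commit:

    import backtype.storm.generated.JavaObjectArg;

    public class JavaObjectArgSketch {
        public static void main(String[] args) {
            // Each factory method sets exactly one branch of the union.
            JavaObjectArg intArg = JavaObjectArg.int_arg(42);
            JavaObjectArg strArg = JavaObjectArg.string_arg("localhost");

            System.out.println(intArg.is_set_int_arg());   // true
            System.out.println(intArg.get_int_arg());      // 42

            // A union holds one value at a time; reading an unset branch throws.
            try {
                strArg.get_int_arg();
            } catch (RuntimeException expected) {
                System.out.println(expected.getMessage());
            }
        }
    }
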
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/KillOptions.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/KillOptions.java b/jstorm-client/src/main/java/backtype/storm/generated/KillOptions.java
deleted file mode 100644
index cf07150..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/KillOptions.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class KillOptions implements org.apache.thrift7.TBase<KillOptions, KillOptions._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("KillOptions");
-
- private static final org.apache.thrift7.protocol.TField WAIT_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("wait_secs", org.apache.thrift7.protocol.TType.I32, (short)1);
-
- private int wait_secs; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- WAIT_SECS((short)1, "wait_secs");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // WAIT_SECS
- return WAIT_SECS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __WAIT_SECS_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.WAIT_SECS, new org.apache.thrift7.meta_data.FieldMetaData("wait_secs", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(KillOptions.class, metaDataMap);
- }
-
- public KillOptions() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public KillOptions(KillOptions other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.wait_secs = other.wait_secs;
- }
-
- public KillOptions deepCopy() {
- return new KillOptions(this);
- }
-
- @Override
- public void clear() {
- set_wait_secs_isSet(false);
- this.wait_secs = 0;
- }
-
- public int get_wait_secs() {
- return this.wait_secs;
- }
-
- public void set_wait_secs(int wait_secs) {
- this.wait_secs = wait_secs;
- set_wait_secs_isSet(true);
- }
-
- public void unset_wait_secs() {
- __isset_bit_vector.clear(__WAIT_SECS_ISSET_ID);
- }
-
- /** Returns true if field wait_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_wait_secs() {
- return __isset_bit_vector.get(__WAIT_SECS_ISSET_ID);
- }
-
- public void set_wait_secs_isSet(boolean value) {
- __isset_bit_vector.set(__WAIT_SECS_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case WAIT_SECS:
- if (value == null) {
- unset_wait_secs();
- } else {
- set_wait_secs((Integer)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case WAIT_SECS:
- return Integer.valueOf(get_wait_secs());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case WAIT_SECS:
- return is_set_wait_secs();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof KillOptions)
- return this.equals((KillOptions)that);
- return false;
- }
-
- public boolean equals(KillOptions that) {
- if (that == null)
- return false;
-
- boolean this_present_wait_secs = true && this.is_set_wait_secs();
- boolean that_present_wait_secs = true && that.is_set_wait_secs();
- if (this_present_wait_secs || that_present_wait_secs) {
- if (!(this_present_wait_secs && that_present_wait_secs))
- return false;
- if (this.wait_secs != that.wait_secs)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_wait_secs = true && (is_set_wait_secs());
- builder.append(present_wait_secs);
- if (present_wait_secs)
- builder.append(wait_secs);
-
- return builder.toHashCode();
- }
-
- public int compareTo(KillOptions other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- KillOptions typedOther = (KillOptions)other;
-
- lastComparison = Boolean.valueOf(is_set_wait_secs()).compareTo(typedOther.is_set_wait_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_wait_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.wait_secs, typedOther.wait_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // WAIT_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.wait_secs = iprot.readI32();
- set_wait_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (is_set_wait_secs()) {
- oprot.writeFieldBegin(WAIT_SECS_FIELD_DESC);
- oprot.writeI32(this.wait_secs);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("KillOptions(");
- boolean first = true;
-
- if (is_set_wait_secs()) {
- sb.append("wait_secs:");
- sb.append(this.wait_secs);
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
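
Note: KillOptions (deleted above in its Thrift 0.7 form) carries a single optional wait_secs field. A minimal sketch of populating it, using only the methods shown in the deleted file; the killTopologyWithOpts call it would normally be passed to is an assumption and is not part of this diff:

    import backtype.storm.generated.KillOptions;

    public class KillOptionsSketch {
        public static void main(String[] args) {
            KillOptions opts = new KillOptions();
            System.out.println(opts.is_set_wait_secs());  // false - optional field starts unset

            opts.set_wait_secs(10);                       // also flips the isset bit
            System.out.println(opts.is_set_wait_secs());  // true - write() will now emit the field

            // client.killTopologyWithOpts("my-topology", opts);  // assumed Nimbus client call
        }
    }
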
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/MonitorOptions.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/MonitorOptions.java b/jstorm-client/src/main/java/backtype/storm/generated/MonitorOptions.java
deleted file mode 100644
index fa0adf3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/MonitorOptions.java
+++ /dev/null
@@ -1,320 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class MonitorOptions implements org.apache.thrift7.TBase<MonitorOptions, MonitorOptions._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("MonitorOptions");
-
- private static final org.apache.thrift7.protocol.TField IS_ENABLE_FIELD_DESC = new org.apache.thrift7.protocol.TField("isEnable", org.apache.thrift7.protocol.TType.BOOL, (short)1);
-
- private boolean isEnable; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- IS_ENABLE((short)1, "isEnable");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // IS_ENABLE
- return IS_ENABLE;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __ISENABLE_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.IS_ENABLE, new org.apache.thrift7.meta_data.FieldMetaData("isEnable", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.BOOL)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(MonitorOptions.class, metaDataMap);
- }
-
- public MonitorOptions() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public MonitorOptions(MonitorOptions other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.isEnable = other.isEnable;
- }
-
- public MonitorOptions deepCopy() {
- return new MonitorOptions(this);
- }
-
- @Override
- public void clear() {
- set_isEnable_isSet(false);
- this.isEnable = false;
- }
-
- public boolean is_isEnable() {
- return this.isEnable;
- }
-
- public void set_isEnable(boolean isEnable) {
- this.isEnable = isEnable;
- set_isEnable_isSet(true);
- }
-
- public void unset_isEnable() {
- __isset_bit_vector.clear(__ISENABLE_ISSET_ID);
- }
-
- /** Returns true if field isEnable is set (has been assigned a value) and false otherwise */
- public boolean is_set_isEnable() {
- return __isset_bit_vector.get(__ISENABLE_ISSET_ID);
- }
-
- public void set_isEnable_isSet(boolean value) {
- __isset_bit_vector.set(__ISENABLE_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case IS_ENABLE:
- if (value == null) {
- unset_isEnable();
- } else {
- set_isEnable((Boolean)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case IS_ENABLE:
- return Boolean.valueOf(is_isEnable());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case IS_ENABLE:
- return is_set_isEnable();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof MonitorOptions)
- return this.equals((MonitorOptions)that);
- return false;
- }
-
- public boolean equals(MonitorOptions that) {
- if (that == null)
- return false;
-
- boolean this_present_isEnable = true && this.is_set_isEnable();
- boolean that_present_isEnable = true && that.is_set_isEnable();
- if (this_present_isEnable || that_present_isEnable) {
- if (!(this_present_isEnable && that_present_isEnable))
- return false;
- if (this.isEnable != that.isEnable)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_isEnable = true && (is_set_isEnable());
- builder.append(present_isEnable);
- if (present_isEnable)
- builder.append(isEnable);
-
- return builder.toHashCode();
- }
-
- public int compareTo(MonitorOptions other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- MonitorOptions typedOther = (MonitorOptions)other;
-
- lastComparison = Boolean.valueOf(is_set_isEnable()).compareTo(typedOther.is_set_isEnable());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_isEnable()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.isEnable, typedOther.isEnable);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // IS_ENABLE
- if (field.type == org.apache.thrift7.protocol.TType.BOOL) {
- this.isEnable = iprot.readBool();
- set_isEnable_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (is_set_isEnable()) {
- oprot.writeFieldBegin(IS_ENABLE_FIELD_DESC);
- oprot.writeBool(this.isEnable);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("MonitorOptions(");
- boolean first = true;
-
- if (is_set_isEnable()) {
- sb.append("isEnable:");
- sb.append(this.isEnable);
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
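
Note: both option structs above delegate Java serialization to Thrift's compact protocol through their private writeObject/readObject methods, so a plain JDK object-stream round trip preserves the isset state. A minimal sketch, assuming nothing beyond the deleted classes and the JDK:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.ObjectInputStream;
    import java.io.ObjectOutputStream;
    import backtype.storm.generated.MonitorOptions;

    public class MonitorOptionsRoundTrip {
        public static void main(String[] args) throws Exception {
            MonitorOptions opts = new MonitorOptions();
            opts.set_isEnable(true);

            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (ObjectOutputStream out = new ObjectOutputStream(bos)) {
                out.writeObject(opts);                    // writeObject() streams via TCompactProtocol
            }

            try (ObjectInputStream in =
                     new ObjectInputStream(new ByteArrayInputStream(bos.toByteArray()))) {
                MonitorOptions copy = (MonitorOptions) in.readObject();  // readObject() rebuilds __isset_bit_vector
                System.out.println(copy.is_set_isEnable() && copy.is_isEnable());  // true
            }
        }
    }
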
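Note: the next file adds the Thrift 0.9.2 regeneration of the DistributedRPC service, whose only method is execute(functionName, funcArgs). A minimal client-side sketch follows; the host, port and the choice of framed transport with the binary protocol are assumptions about a typical DRPC deployment, not something defined in this diff:

    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TFramedTransport;
    import org.apache.thrift.transport.TSocket;
    import backtype.storm.generated.DistributedRPC;

    public class DrpcClientSketch {
        public static void main(String[] args) throws Exception {
            TFramedTransport transport =
                new TFramedTransport(new TSocket("drpc-host", 3772));  // assumed DRPC endpoint
            transport.open();
            try {
                DistributedRPC.Client client =
                    new DistributedRPC.Client(new TBinaryProtocol(transport));
                String result = client.execute("reach", "http://example.com");  // may throw DRPCExecutionException
                System.out.println(result);
            } finally {
                transport.close();
            }
        }
    }
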
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPC.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPC.java b/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPC.java
new file mode 100644
index 0000000..ff3c112
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPC.java
@@ -0,0 +1,1195 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class DistributedRPC {
+
+ public interface Iface {
+
+ public String execute(String functionName, String funcArgs) throws DRPCExecutionException, org.apache.thrift.TException;
+
+ }
+
+ public interface AsyncIface {
+
+ public void execute(String functionName, String funcArgs, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ }
+
+ public static class Client extends org.apache.thrift.TServiceClient implements Iface {
+ public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
+ public Factory() {}
+ public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
+ return new Client(prot);
+ }
+ public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+ return new Client(iprot, oprot);
+ }
+ }
+
+ public Client(org.apache.thrift.protocol.TProtocol prot)
+ {
+ super(prot, prot);
+ }
+
+ public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+ super(iprot, oprot);
+ }
+
+ public String execute(String functionName, String funcArgs) throws DRPCExecutionException, org.apache.thrift.TException
+ {
+ send_execute(functionName, funcArgs);
+ return recv_execute();
+ }
+
+ public void send_execute(String functionName, String funcArgs) throws org.apache.thrift.TException
+ {
+ execute_args args = new execute_args();
+ args.set_functionName(functionName);
+ args.set_funcArgs(funcArgs);
+ sendBase("execute", args);
+ }
+
+ public String recv_execute() throws DRPCExecutionException, org.apache.thrift.TException
+ {
+ execute_result result = new execute_result();
+ receiveBase(result, "execute");
+ if (result.is_set_success()) {
+ return result.success;
+ }
+ if (result.e != null) {
+ throw result.e;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "execute failed: unknown result");
+ }
+
+ }
+ public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
+ public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
+ private org.apache.thrift.async.TAsyncClientManager clientManager;
+ private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
+ public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
+ this.clientManager = clientManager;
+ this.protocolFactory = protocolFactory;
+ }
+ public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
+ return new AsyncClient(protocolFactory, clientManager, transport);
+ }
+ }
+
+ public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
+ super(protocolFactory, clientManager, transport);
+ }
+
+ public void execute(String functionName, String funcArgs, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ execute_call method_call = new execute_call(functionName, funcArgs, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class execute_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String functionName;
+ private String funcArgs;
+ public execute_call(String functionName, String funcArgs, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.functionName = functionName;
+ this.funcArgs = funcArgs;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("execute", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ execute_args args = new execute_args();
+ args.set_functionName(functionName);
+ args.set_funcArgs(funcArgs);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public String getResult() throws DRPCExecutionException, org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_execute();
+ }
+ }
+
+ }
+
+ public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
+ private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
+ public Processor(I iface) {
+ super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
+ }
+
+ protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
+ super(iface, getProcessMap(processMap));
+ }
+
+ private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
+ processMap.put("execute", new execute());
+ return processMap;
+ }
+
+ public static class execute<I extends Iface> extends org.apache.thrift.ProcessFunction<I, execute_args> {
+ public execute() {
+ super("execute");
+ }
+
+ public execute_args getEmptyArgsInstance() {
+ return new execute_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public execute_result getResult(I iface, execute_args args) throws org.apache.thrift.TException {
+ execute_result result = new execute_result();
+ try {
+ result.success = iface.execute(args.functionName, args.funcArgs);
+ } catch (DRPCExecutionException e) {
+ result.e = e;
+ }
+ return result;
+ }
+ }
+
+ }
+
+ public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
+ public AsyncProcessor(I iface) {
+ super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
+ }
+
+ protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
+ super(iface, getProcessMap(processMap));
+ }
+
+ private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase,?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
+ processMap.put("execute", new execute());
+ return processMap;
+ }
+
+ public static class execute<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, execute_args, String> {
+ public execute() {
+ super("execute");
+ }
+
+ public execute_args getEmptyArgsInstance() {
+ return new execute_args();
+ }
+
+ public AsyncMethodCallback<String> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<String>() {
+ public void onComplete(String o) {
+ execute_result result = new execute_result();
+ result.success = o;
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ execute_result result = new execute_result();
+ if (e instanceof DRPCExecutionException) {
+ result.e = (DRPCExecutionException) e;
+ result.set_e_isSet(true);
+ msg = result;
+ }
+ else
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public void start(I iface, execute_args args, org.apache.thrift.async.AsyncMethodCallback<String> resultHandler) throws TException {
+ iface.execute(args.functionName, args.funcArgs,resultHandler);
+ }
+ }
+
+ }
+
+ public static class execute_args implements org.apache.thrift.TBase<execute_args, execute_args._Fields>, java.io.Serializable, Cloneable, Comparable<execute_args> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("execute_args");
+
+ private static final org.apache.thrift.protocol.TField FUNCTION_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("functionName", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField FUNC_ARGS_FIELD_DESC = new org.apache.thrift.protocol.TField("funcArgs", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new execute_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new execute_argsTupleSchemeFactory());
+ }
+
+ private String functionName; // required
+ private String funcArgs; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ FUNCTION_NAME((short)1, "functionName"),
+ FUNC_ARGS((short)2, "funcArgs");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // FUNCTION_NAME
+ return FUNCTION_NAME;
+ case 2: // FUNC_ARGS
+ return FUNC_ARGS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.FUNCTION_NAME, new org.apache.thrift.meta_data.FieldMetaData("functionName", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.FUNC_ARGS, new org.apache.thrift.meta_data.FieldMetaData("funcArgs", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(execute_args.class, metaDataMap);
+ }
+
+ public execute_args() {
+ }
+
+ public execute_args(
+ String functionName,
+ String funcArgs)
+ {
+ this();
+ this.functionName = functionName;
+ this.funcArgs = funcArgs;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public execute_args(execute_args other) {
+ if (other.is_set_functionName()) {
+ this.functionName = other.functionName;
+ }
+ if (other.is_set_funcArgs()) {
+ this.funcArgs = other.funcArgs;
+ }
+ }
+
+ public execute_args deepCopy() {
+ return new execute_args(this);
+ }
+
+ @Override
+ public void clear() {
+ this.functionName = null;
+ this.funcArgs = null;
+ }
+
+ public String get_functionName() {
+ return this.functionName;
+ }
+
+ public void set_functionName(String functionName) {
+ this.functionName = functionName;
+ }
+
+ public void unset_functionName() {
+ this.functionName = null;
+ }
+
+ /** Returns true if field functionName is set (has been assigned a value) and false otherwise */
+ public boolean is_set_functionName() {
+ return this.functionName != null;
+ }
+
+ public void set_functionName_isSet(boolean value) {
+ if (!value) {
+ this.functionName = null;
+ }
+ }
+
+ public String get_funcArgs() {
+ return this.funcArgs;
+ }
+
+ public void set_funcArgs(String funcArgs) {
+ this.funcArgs = funcArgs;
+ }
+
+ public void unset_funcArgs() {
+ this.funcArgs = null;
+ }
+
+ /** Returns true if field funcArgs is set (has been assigned a value) and false otherwise */
+ public boolean is_set_funcArgs() {
+ return this.funcArgs != null;
+ }
+
+ public void set_funcArgs_isSet(boolean value) {
+ if (!value) {
+ this.funcArgs = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case FUNCTION_NAME:
+ if (value == null) {
+ unset_functionName();
+ } else {
+ set_functionName((String)value);
+ }
+ break;
+
+ case FUNC_ARGS:
+ if (value == null) {
+ unset_funcArgs();
+ } else {
+ set_funcArgs((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case FUNCTION_NAME:
+ return get_functionName();
+
+ case FUNC_ARGS:
+ return get_funcArgs();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case FUNCTION_NAME:
+ return is_set_functionName();
+ case FUNC_ARGS:
+ return is_set_funcArgs();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof execute_args)
+ return this.equals((execute_args)that);
+ return false;
+ }
+
+ public boolean equals(execute_args that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_functionName = true && this.is_set_functionName();
+ boolean that_present_functionName = true && that.is_set_functionName();
+ if (this_present_functionName || that_present_functionName) {
+ if (!(this_present_functionName && that_present_functionName))
+ return false;
+ if (!this.functionName.equals(that.functionName))
+ return false;
+ }
+
+ boolean this_present_funcArgs = true && this.is_set_funcArgs();
+ boolean that_present_funcArgs = true && that.is_set_funcArgs();
+ if (this_present_funcArgs || that_present_funcArgs) {
+ if (!(this_present_funcArgs && that_present_funcArgs))
+ return false;
+ if (!this.funcArgs.equals(that.funcArgs))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_functionName = true && (is_set_functionName());
+ list.add(present_functionName);
+ if (present_functionName)
+ list.add(functionName);
+
+ boolean present_funcArgs = true && (is_set_funcArgs());
+ list.add(present_funcArgs);
+ if (present_funcArgs)
+ list.add(funcArgs);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(execute_args other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_functionName()).compareTo(other.is_set_functionName());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_functionName()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.functionName, other.functionName);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_funcArgs()).compareTo(other.is_set_funcArgs());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_funcArgs()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.funcArgs, other.funcArgs);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("execute_args(");
+ boolean first = true;
+
+ sb.append("functionName:");
+ if (this.functionName == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.functionName);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("funcArgs:");
+ if (this.funcArgs == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.funcArgs);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class execute_argsStandardSchemeFactory implements SchemeFactory {
+ public execute_argsStandardScheme getScheme() {
+ return new execute_argsStandardScheme();
+ }
+ }
+
+ private static class execute_argsStandardScheme extends StandardScheme<execute_args> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, execute_args struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // FUNCTION_NAME
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.functionName = iprot.readString();
+ struct.set_functionName_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // FUNC_ARGS
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.funcArgs = iprot.readString();
+ struct.set_funcArgs_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, execute_args struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.functionName != null) {
+ oprot.writeFieldBegin(FUNCTION_NAME_FIELD_DESC);
+ oprot.writeString(struct.functionName);
+ oprot.writeFieldEnd();
+ }
+ if (struct.funcArgs != null) {
+ oprot.writeFieldBegin(FUNC_ARGS_FIELD_DESC);
+ oprot.writeString(struct.funcArgs);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class execute_argsTupleSchemeFactory implements SchemeFactory {
+ public execute_argsTupleScheme getScheme() {
+ return new execute_argsTupleScheme();
+ }
+ }
+
+ private static class execute_argsTupleScheme extends TupleScheme<execute_args> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, execute_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_functionName()) {
+ optionals.set(0);
+ }
+ if (struct.is_set_funcArgs()) {
+ optionals.set(1);
+ }
+ oprot.writeBitSet(optionals, 2);
+ if (struct.is_set_functionName()) {
+ oprot.writeString(struct.functionName);
+ }
+ if (struct.is_set_funcArgs()) {
+ oprot.writeString(struct.funcArgs);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, execute_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(2);
+ if (incoming.get(0)) {
+ struct.functionName = iprot.readString();
+ struct.set_functionName_isSet(true);
+ }
+ if (incoming.get(1)) {
+ struct.funcArgs = iprot.readString();
+ struct.set_funcArgs_isSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class execute_result implements org.apache.thrift.TBase<execute_result, execute_result._Fields>, java.io.Serializable, Cloneable, Comparable<execute_result> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("execute_result");
+
+ private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRING, (short)0);
+ private static final org.apache.thrift.protocol.TField E_FIELD_DESC = new org.apache.thrift.protocol.TField("e", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new execute_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new execute_resultTupleSchemeFactory());
+ }
+
+ private String success; // required
+ private DRPCExecutionException e; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ SUCCESS((short)0, "success"),
+ E((short)1, "e");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 0: // SUCCESS
+ return SUCCESS;
+ case 1: // E
+ return E;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.E, new org.apache.thrift.meta_data.FieldMetaData("e", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRUCT)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(execute_result.class, metaDataMap);
+ }
+
+ public execute_result() {
+ }
+
+ public execute_result(
+ String success,
+ DRPCExecutionException e)
+ {
+ this();
+ this.success = success;
+ this.e = e;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public execute_result(execute_result other) {
+ if (other.is_set_success()) {
+ this.success = other.success;
+ }
+ if (other.is_set_e()) {
+ this.e = new DRPCExecutionException(other.e);
+ }
+ }
+
+ public execute_result deepCopy() {
+ return new execute_result(this);
+ }
+
+ @Override
+ public void clear() {
+ this.success = null;
+ this.e = null;
+ }
+
+ public String get_success() {
+ return this.success;
+ }
+
+ public void set_success(String success) {
+ this.success = success;
+ }
+
+ public void unset_success() {
+ this.success = null;
+ }
+
+ /** Returns true if field success is set (has been assigned a value) and false otherwise */
+ public boolean is_set_success() {
+ return this.success != null;
+ }
+
+ public void set_success_isSet(boolean value) {
+ if (!value) {
+ this.success = null;
+ }
+ }
+
+ public DRPCExecutionException get_e() {
+ return this.e;
+ }
+
+ public void set_e(DRPCExecutionException e) {
+ this.e = e;
+ }
+
+ public void unset_e() {
+ this.e = null;
+ }
+
+ /** Returns true if field e is set (has been assigned a value) and false otherwise */
+ public boolean is_set_e() {
+ return this.e != null;
+ }
+
+ public void set_e_isSet(boolean value) {
+ if (!value) {
+ this.e = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case SUCCESS:
+ if (value == null) {
+ unset_success();
+ } else {
+ set_success((String)value);
+ }
+ break;
+
+ case E:
+ if (value == null) {
+ unset_e();
+ } else {
+ set_e((DRPCExecutionException)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case SUCCESS:
+ return get_success();
+
+ case E:
+ return get_e();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case SUCCESS:
+ return is_set_success();
+ case E:
+ return is_set_e();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof execute_result)
+ return this.equals((execute_result)that);
+ return false;
+ }
+
+ public boolean equals(execute_result that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_success = true && this.is_set_success();
+ boolean that_present_success = true && that.is_set_success();
+ if (this_present_success || that_present_success) {
+ if (!(this_present_success && that_present_success))
+ return false;
+ if (!this.success.equals(that.success))
+ return false;
+ }
+
+ boolean this_present_e = true && this.is_set_e();
+ boolean that_present_e = true && that.is_set_e();
+ if (this_present_e || that_present_e) {
+ if (!(this_present_e && that_present_e))
+ return false;
+ if (!this.e.equals(that.e))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_success = true && (is_set_success());
+ list.add(present_success);
+ if (present_success)
+ list.add(success);
+
+ boolean present_e = true && (is_set_e());
+ list.add(present_e);
+ if (present_e)
+ list.add(e);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(execute_result other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_success()).compareTo(other.is_set_success());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_success()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_e()).compareTo(other.is_set_e());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_e()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.e, other.e);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("execute_result(");
+ boolean first = true;
+
+ sb.append("success:");
+ if (this.success == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.success);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("e:");
+ if (this.e == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.e);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class execute_resultStandardSchemeFactory implements SchemeFactory {
+ public execute_resultStandardScheme getScheme() {
+ return new execute_resultStandardScheme();
+ }
+ }
+
+ private static class execute_resultStandardScheme extends StandardScheme<execute_result> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, execute_result struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 0: // SUCCESS
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.success = iprot.readString();
+ struct.set_success_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 1: // E
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.e = new DRPCExecutionException();
+ struct.e.read(iprot);
+ struct.set_e_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, execute_result struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.success != null) {
+ oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
+ oprot.writeString(struct.success);
+ oprot.writeFieldEnd();
+ }
+ if (struct.e != null) {
+ oprot.writeFieldBegin(E_FIELD_DESC);
+ struct.e.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class execute_resultTupleSchemeFactory implements SchemeFactory {
+ public execute_resultTupleScheme getScheme() {
+ return new execute_resultTupleScheme();
+ }
+ }
+
+ private static class execute_resultTupleScheme extends TupleScheme<execute_result> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, execute_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_success()) {
+ optionals.set(0);
+ }
+ if (struct.is_set_e()) {
+ optionals.set(1);
+ }
+ oprot.writeBitSet(optionals, 2);
+ if (struct.is_set_success()) {
+ oprot.writeString(struct.success);
+ }
+ if (struct.is_set_e()) {
+ struct.e.write(oprot);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, execute_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(2);
+ if (incoming.get(0)) {
+ struct.success = iprot.readString();
+ struct.set_success_isSet(true);
+ }
+ if (incoming.get(1)) {
+ struct.e = new DRPCExecutionException();
+ struct.e.read(iprot);
+ struct.set_e_isSet(true);
+ }
+ }
+ }
+
+ }
+
+}
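The execute_args and execute_result structs added above follow the standard Thrift code-generation pattern: a StandardScheme that writes each set field with its id and type tag (so readers can skip unknown fields), and a TupleScheme that writes a bit set of the present fields followed by their values for a more compact encoding. A minimal sketch of round-tripping one of these structs through the standard scheme, assuming the enclosing generated service class is DistributedRPC and the usual generated all-fields constructor; the function name and arguments are illustrative only:

    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TMemoryBuffer;
    import backtype.storm.generated.DistributedRPC;

    public class ExecuteArgsRoundTrip {
        public static void main(String[] args) throws Exception {
            // Hypothetical function name and JSON arguments, for illustration only.
            DistributedRPC.execute_args out =
                    new DistributedRPC.execute_args("word-count", "{\"word\":\"storm\"}");

            // TBinaryProtocol dispatches to the StandardScheme: each set field is
            // written as (id, type, value), terminated by a STOP marker.
            TMemoryBuffer transport = new TMemoryBuffer(256);
            out.write(new TBinaryProtocol(transport));

            DistributedRPC.execute_args in = new DistributedRPC.execute_args();
            in.read(new TBinaryProtocol(transport));

            // equals()/hashCode() in the generated class compare only the set fields.
            System.out.println(out.equals(in));   // expected: true
        }
    }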
[22/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/disruptor/MultiProducerSequencer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/MultiProducerSequencer.java b/jstorm-client/src/main/java/backtype/storm/utils/disruptor/MultiProducerSequencer.java
deleted file mode 100644
index ba4521b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/MultiProducerSequencer.java
+++ /dev/null
@@ -1,298 +0,0 @@
-package backtype.storm.utils.disruptor;
-
-
-import java.util.concurrent.locks.LockSupport;
-
-import sun.misc.Unsafe;
-
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.Sequence;
-import com.lmax.disruptor.Sequencer;
-import com.lmax.disruptor.WaitStrategy;
-import com.lmax.disruptor.util.Util;
-
-
-/**
- * Coordinator for claiming sequences for access to a data structure while tracking dependent {@link Sequence}s
- *
- * Suitable for use for sequencing across multiple publisher threads.
- */
-public class MultiProducerSequencer extends AbstractSequencerExt
-{
-
-
- private static final Unsafe UNSAFE = Util.getUnsafe();
- private static final long BASE = UNSAFE.arrayBaseOffset(int[].class);
- private static final long SCALE = UNSAFE.arrayIndexScale(int[].class);
-
-
-
- private final Sequence gatingSequenceCache = new Sequence(Sequencer.INITIAL_CURSOR_VALUE);
-
- // availableBuffer tracks the state of each ringbuffer slot
- // see below for more details on the approach
- private final int[] availableBuffer;
- private final int indexMask;
- private final int indexShift;
-
- /**
- * Construct a Sequencer with the selected wait strategy and buffer size.
- *
- * @param bufferSize the size of the buffer that this will sequence over.
- * @param waitStrategy for those waiting on sequences.
- */
- public MultiProducerSequencer(int bufferSize, final WaitStrategy waitStrategy)
- {
- super(bufferSize, waitStrategy);
- availableBuffer = new int[bufferSize];
- indexMask = bufferSize - 1;
- indexShift = Util.log2(bufferSize);
- initialiseAvailableBuffer();
- }
-
- /**
- * @see Sequencer#hasAvailableCapacity(int)
- */
- @Override
- public boolean hasAvailableCapacity(final int requiredCapacity)
- {
- return hasAvailableCapacity(gatingSequences, requiredCapacity, cursor.get());
- }
-
- private boolean hasAvailableCapacity(Sequence[] gatingSequences, final int requiredCapacity, long cursorValue)
- {
- long wrapPoint = (cursorValue + requiredCapacity) - bufferSize;
- long cachedGatingSequence = gatingSequenceCache.get();
-
- if (wrapPoint > cachedGatingSequence || cachedGatingSequence > cursorValue)
- {
- long minSequence = Util.getMinimumSequence(gatingSequences, cursorValue);
- gatingSequenceCache.set(minSequence);
-
- if (wrapPoint > minSequence)
- {
- return false;
- }
- }
-
- return true;
- }
-
- /**
- * @see Sequencer#claim(long)
- */
- @Override
- public void claim(long sequence)
- {
- cursor.set(sequence);
- }
-
- /**
- * @see Sequencer#next()
- */
- @Override
- public long next()
- {
- return next(1);
- }
-
- /**
- * @see Sequencer#next(int)
- */
- @Override
- public long next(int n)
- {
- if (n < 1)
- {
- throw new IllegalArgumentException("n must be > 0");
- }
-
- long current;
- long next;
-
- do
- {
- current = cursor.get();
- next = current + n;
-
- long wrapPoint = next - bufferSize;
- long cachedGatingSequence = gatingSequenceCache.get();
-
- if (wrapPoint > cachedGatingSequence || cachedGatingSequence > current)
- {
- long gatingSequence = Util.getMinimumSequence(gatingSequences, current);
-
- if (wrapPoint > gatingSequence)
- {
- if (AbstractSequencerExt.isWaitSleep()) {
- try {
- Thread.sleep(1);
- } catch (InterruptedException e) {
- }
- }else {
- LockSupport.parkNanos(1);
- }
- continue;
- }
-
- gatingSequenceCache.set(gatingSequence);
- }
- else if (cursor.compareAndSet(current, next))
- {
- break;
- }
- }
- while (true);
-
- return next;
- }
-
- /**
- * @see Sequencer#tryNext()
- */
- @Override
- public long tryNext() throws InsufficientCapacityException
- {
- return tryNext(1);
- }
-
- /**
- * @see Sequencer#tryNext(int)
- */
- @Override
- public long tryNext(int n) throws InsufficientCapacityException
- {
- if (n < 1)
- {
- throw new IllegalArgumentException("n must be > 0");
- }
-
- long current;
- long next;
-
- do
- {
- current = cursor.get();
- next = current + n;
-
- if (!hasAvailableCapacity(gatingSequences, n, current))
- {
- throw InsufficientCapacityException.INSTANCE;
- }
- }
- while (!cursor.compareAndSet(current, next));
-
- return next;
- }
-
- /**
- * @see Sequencer#remainingCapacity()
- */
- @Override
- public long remainingCapacity()
- {
- long consumed = Util.getMinimumSequence(gatingSequences, cursor.get());
- long produced = cursor.get();
- return getBufferSize() - (produced - consumed);
- }
-
- private void initialiseAvailableBuffer()
- {
- for (int i = availableBuffer.length - 1; i != 0; i--)
- {
- setAvailableBufferValue(i, -1);
- }
-
- setAvailableBufferValue(0, -1);
- }
-
- /**
- * @see Sequencer#publish(long)
- */
- @Override
- public void publish(final long sequence)
- {
- setAvailable(sequence);
- waitStrategy.signalAllWhenBlocking();
- }
-
- /**
- * @see Sequencer#publish(long, long)
- */
- @Override
- public void publish(long lo, long hi)
- {
- for (long l = lo; l <= hi; l++)
- {
- setAvailable(l);
- }
- waitStrategy.signalAllWhenBlocking();
- }
-
- /**
- * The below methods work on the availableBuffer flag.
- *
- * The prime reason is to avoid a shared sequence object between publisher threads.
- * (Keeping single pointers tracking start and end would require coordination
- * between the threads).
- *
- * -- Firstly we have the constraint that the delta between the cursor and minimum
- * gating sequence will never be larger than the buffer size (the code in
- * next/tryNext in the Sequence takes care of that).
- * -- Given that; take the sequence value and mask off the lower portion of the
- * sequence as the index into the buffer (indexMask). (aka modulo operator)
- * -- The upper portion of the sequence becomes the value to check for availability.
- * ie: it tells us how many times around the ring buffer we've been (aka division)
- * -- Because we can't wrap without the gating sequences moving forward (i.e. the
- * minimum gating sequence is effectively our last available position in the
- * buffer), when we have new data and successfully claimed a slot we can simply
- * write over the top.
- */
- private void setAvailable(final long sequence)
- {
- setAvailableBufferValue(calculateIndex(sequence), calculateAvailabilityFlag(sequence));
- }
-
- private void setAvailableBufferValue(int index, int flag)
- {
- long bufferAddress = (index * SCALE) + BASE;
- UNSAFE.putOrderedInt(availableBuffer, bufferAddress, flag);
- }
-
- /**
- * @see Sequencer#isAvailable(long)
- */
- @Override
- public boolean isAvailable(long sequence)
- {
- int index = calculateIndex(sequence);
- int flag = calculateAvailabilityFlag(sequence);
- long bufferAddress = (index * SCALE) + BASE;
- return UNSAFE.getIntVolatile(availableBuffer, bufferAddress) == flag;
- }
-
- @Override
- public long getHighestPublishedSequence(long lowerBound, long availableSequence)
- {
- for (long sequence = lowerBound; sequence <= availableSequence; sequence++)
- {
- if (!isAvailable(sequence))
- {
- return sequence - 1;
- }
- }
-
- return availableSequence;
- }
-
- private int calculateAvailabilityFlag(final long sequence)
- {
- return (int) (sequence >>> indexShift);
- }
-
- private int calculateIndex(final long sequence)
- {
- return ((int) sequence) & indexMask;
- }
-}
-
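The availableBuffer bookkeeping described in the comment block of the removed MultiProducerSequencer splits each sequence number into two parts: the low bits select the slot (calculateIndex masks with bufferSize - 1) and the high bits count how many laps around the ring that sequence represents (calculateAvailabilityFlag shifts right by log2(bufferSize)). A slot is considered published exactly when the flag stored in it matches the lap expected for the queried sequence. A small sketch of the arithmetic, assuming a buffer size of 8:

    public class AvailabilityFlagDemo {
        public static void main(String[] args) {
            int bufferSize = 8;                                          // must be a power of 2
            int indexMask  = bufferSize - 1;                             // low bits -> slot index
            int indexShift = Integer.numberOfTrailingZeros(bufferSize);  // log2(bufferSize)

            long sequence = 21L;
            int index = (int) sequence & indexMask;                      // 21 & 7   == 5
            int flag  = (int) (sequence >>> indexShift);                 // 21 >>> 3 == 2 (third lap)

            // publish(21) stores 2 into availableBuffer[5];
            // isAvailable(21) is true only while availableBuffer[5] == 2.
            System.out.println("slot=" + index + " lap=" + flag);        // slot=5 lap=2
        }
    }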
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java b/jstorm-client/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java
deleted file mode 100644
index fcc922d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/RingBuffer.java
+++ /dev/null
@@ -1,1184 +0,0 @@
-package backtype.storm.utils.disruptor;
-
-import com.lmax.disruptor.BlockingWaitStrategy;
-import com.lmax.disruptor.Cursored;
-import com.lmax.disruptor.DataProvider;
-import com.lmax.disruptor.EventFactory;
-import com.lmax.disruptor.EventProcessor;
-import com.lmax.disruptor.EventTranslator;
-import com.lmax.disruptor.EventTranslatorOneArg;
-import com.lmax.disruptor.EventTranslatorThreeArg;
-import com.lmax.disruptor.EventTranslatorTwoArg;
-import com.lmax.disruptor.EventTranslatorVararg;
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.Sequence;
-import com.lmax.disruptor.SequenceBarrier;
-import com.lmax.disruptor.Sequencer;
-import com.lmax.disruptor.WaitStrategy;
-import com.lmax.disruptor.dsl.ProducerType;
-import backtype.storm.utils.disruptor.MultiProducerSequencer;
-import backtype.storm.utils.disruptor.SingleProducerSequencer;
-
-/**
- * Ring based store of reusable entries containing the data representing
- * an event being exchanged between event producer and {@link EventProcessor}s.
- *
- * @param <E> implementation storing the data for sharing during exchange or parallel coordination of an event.
- */
-public class RingBuffer<E> implements Cursored, DataProvider<E>
-{
- public static final long INITIAL_CURSOR_VALUE = -1L;
-
- private final int indexMask;
- private final Object[] entries;
- private final int bufferSize;
- private final Sequencer sequencer;
-
- /**
- * Construct a RingBuffer with the full option set.
- *
- * @param eventFactory to newInstance entries for filling the RingBuffer
- * @param sequencer sequencer to handle the ordering of events moving through the RingBuffer.
- * @throws IllegalArgumentException if bufferSize is less than 1 or not a power of 2
- */
- public RingBuffer(EventFactory<E> eventFactory,
- Sequencer sequencer)
- {
- this.sequencer = sequencer;
- this.bufferSize = sequencer.getBufferSize();
-
- if (bufferSize < 1)
- {
- throw new IllegalArgumentException("bufferSize must not be less than 1");
- }
- if (Integer.bitCount(bufferSize) != 1)
- {
- throw new IllegalArgumentException("bufferSize must be a power of 2");
- }
-
- this.indexMask = bufferSize - 1;
- this.entries = new Object[sequencer.getBufferSize()];
- fill(eventFactory);
- }
-
- /**
- * Create a new multiple producer RingBuffer with the specified wait strategy.
- *
- * @see MultiProducerSequencer
- * @param factory used to create the events within the ring buffer.
- * @param bufferSize number of elements to create within the ring buffer.
- * @param waitStrategy used to determine how to wait for new elements to become available.
- * @throws IllegalArgumentException if bufferSize is less than 1 or not a power of 2
- */
- public static <E> RingBuffer<E> createMultiProducer(EventFactory<E> factory,
- int bufferSize,
- WaitStrategy waitStrategy)
- {
- MultiProducerSequencer sequencer = new MultiProducerSequencer(bufferSize, waitStrategy);
-
- return new RingBuffer<E>(factory, sequencer);
- }
-
- /**
- * Create a new multiple producer RingBuffer using the default wait strategy {@link BlockingWaitStrategy}.
- *
- * @see MultiProducerSequencer
- * @param factory used to create the events within the ring buffer.
- * @param bufferSize number of elements to create within the ring buffer.
- * @throws IllegalArgumentException if <tt>bufferSize</tt> is less than 1 or not a power of 2
- */
- public static <E> RingBuffer<E> createMultiProducer(EventFactory<E> factory, int bufferSize)
- {
- return createMultiProducer(factory, bufferSize, new BlockingWaitStrategy());
- }
-
- /**
- * Create a new single producer RingBuffer with the specified wait strategy.
- *
- * @see SingleProducerSequencer
- * @param factory used to create the events within the ring buffer.
- * @param bufferSize number of elements to create within the ring buffer.
- * @param waitStrategy used to determine how to wait for new elements to become available.
- * @throws IllegalArgumentException if bufferSize is less than 1 or not a power of 2
- */
- public static <E> RingBuffer<E> createSingleProducer(EventFactory<E> factory,
- int bufferSize,
- WaitStrategy waitStrategy)
- {
- SingleProducerSequencer sequencer = new SingleProducerSequencer(bufferSize, waitStrategy);
-
- return new RingBuffer<E>(factory, sequencer);
- }
-
- /**
- * Create a new single producer RingBuffer using the default wait strategy {@link BlockingWaitStrategy}.
- *
- * @see MultiProducerSequencer
- * @param factory used to create the events within the ring buffer.
- * @param bufferSize number of elements to create within the ring buffer.
- * @throws IllegalArgumentException if <tt>bufferSize</tt> is less than 1 or not a power of 2
- */
- public static <E> RingBuffer<E> createSingleProducer(EventFactory<E> factory, int bufferSize)
- {
- return createSingleProducer(factory, bufferSize, new BlockingWaitStrategy());
- }
-
- /**
- * Create a new Ring Buffer with the specified producer type (SINGLE or MULTI)
- *
- * @param producerType producer type to use {@link ProducerType}.
- * @param factory used to create events within the ring buffer.
- * @param bufferSize number of elements to create within the ring buffer.
- * @param waitStrategy used to determine how to wait for new elements to become available.
- * @throws IllegalArgumentException if bufferSize is less than 1 or not a power of 2
- */
- public static <E> RingBuffer<E> create(ProducerType producerType,
- EventFactory<E> factory,
- int bufferSize,
- WaitStrategy waitStrategy)
- {
- switch (producerType)
- {
- case SINGLE:
- return createSingleProducer(factory, bufferSize, waitStrategy);
- case MULTI:
- return createMultiProducer(factory, bufferSize, waitStrategy);
- default:
- throw new IllegalStateException(producerType.toString());
- }
- }
-
- /**
- * <p>Get the event for a given sequence in the RingBuffer.</p>
- *
- * <p>This call has 2 uses. Firstly use this call when publishing to a ring buffer.
- * After calling {@link RingBuffer#next()} use this call to get hold of the
- * preallocated event to fill with data before calling {@link RingBuffer#publish(long)}.</p>
- *
- * <p>Secondly use this call when consuming data from the ring buffer. After calling
- * {@link SequenceBarrier#waitFor(long)} call this method with any value greater than
- * that your current consumer sequence and less than or equal to the value returned from
- * the {@link SequenceBarrier#waitFor(long)} method.</p>
- *
- * @param sequence for the event
- * @return the event for the given sequence
- */
- @SuppressWarnings("unchecked")
- public E get(long sequence)
- {
- return (E)entries[(int)sequence & indexMask];
- }
-
- /**
- * @deprecated Use {@link RingBuffer#get(long)}
- */
- @Deprecated
- public E getPreallocated(long sequence)
- {
- return get(sequence);
- }
-
- /**
- * @deprecated Use {@link RingBuffer#get(long)}
- */
- @Deprecated
- public E getPublished(long sequence)
- {
- return get(sequence);
- }
-
- /**
- * Increment and return the next sequence for the ring buffer. Calls of this
- * method should ensure that they always publish the sequence afterward. E.g.
- * <pre>
- * long sequence = ringBuffer.next();
- * try {
- * Event e = ringBuffer.get(sequence);
- * // Do some work with the event.
- * } finally {
- * ringBuffer.publish(sequence);
- * }
- * </pre>
- * @see RingBuffer#publish(long)
- * @see RingBuffer#get(long)
- * @return The next sequence to publish to.
- */
- public long next()
- {
- return sequencer.next();
- }
-
- /**
- * The same functionality as {@link RingBuffer#next()}, but allows the caller to claim
- * the next n sequences.
- *
- * @see Sequencer#next(int)
- * @param n number of slots to claim
- * @return sequence number of the highest slot claimed
- */
- public long next(int n)
- {
- return sequencer.next(n);
- }
-
- /**
- * <p>Increment and return the next sequence for the ring buffer. Calls of this
- * method should ensure that they always publish the sequence afterward. E.g.
- * <pre>
- * long sequence = ringBuffer.next();
- * try {
- * Event e = ringBuffer.get(sequence);
- * // Do some work with the event.
- * } finally {
- * ringBuffer.publish(sequence);
- * }
- * </pre>
- * <p>This method will not block if there is not space available in the ring
- * buffer, instead it will throw an {@link InsufficientCapacityException}.
- *
- *
- * @see RingBuffer#publish(long)
- * @see RingBuffer#get(long)
- * @return The next sequence to publish to.
- * @throws InsufficientCapacityException if the necessary space in the ring buffer is not available
- */
- public long tryNext() throws InsufficientCapacityException
- {
- return sequencer.tryNext();
- }
-
- /**
- * The same functionality as {@link RingBuffer#tryNext()}, but allows the caller to attempt
- * to claim the next n sequences.
- *
- * @param n number of slots to claim
- * @return sequence number of the highest slot claimed
- * @throws InsufficientCapacityException if the necessary space in the ring buffer is not available
- */
- public long tryNext(int n) throws InsufficientCapacityException
- {
- return sequencer.tryNext(n);
- }
-
- /**
- * Resets the cursor to a specific value. This can be applied at any time, but it is worth noting
- * that it is a racy thing to do and should only be used in controlled circumstances. E.g. during
- * initialisation.
- *
- * @param sequence The sequence to reset to.
- * @throws IllegalStateException If any gating sequences have already been specified.
- */
- public void resetTo(long sequence)
- {
- sequencer.claim(sequence);
- sequencer.publish(sequence);
- }
-
- /**
- * Sets the cursor to a specific sequence and returns the preallocated entry that is stored there. This
- * is another deliberately racy call, that should only be done in controlled circumstances, e.g. initialisation.
- *
- * @param sequence The sequence to claim.
- * @return The preallocated event.
- */
- public E claimAndGetPreallocated(long sequence)
- {
- sequencer.claim(sequence);
- return get(sequence);
- }
-
- /**
- * Determines if a particular entry has been published.
- *
- * @param sequence The sequence to identify the entry.
- * @return If the value has been published or not.
- */
- public boolean isPublished(long sequence)
- {
- return sequencer.isAvailable(sequence);
- }
-
- /**
- * Add the specified gating sequences to this instance of the Disruptor. They will
- * be safely and atomically added to the list of gating sequences.
- *
- * @param gatingSequences The sequences to add.
- */
- public void addGatingSequences(Sequence... gatingSequences)
- {
- sequencer.addGatingSequences(gatingSequences);
- }
-
- /**
- * Get the minimum sequence value from all of the gating sequences
- * added to this ringBuffer.
- *
- * @return The minimum gating sequence or the cursor sequence if
- * no sequences have been added.
- */
- public long getMinimumGatingSequence()
- {
- return sequencer.getMinimumSequence();
- }
-
- /**
- * Remove the specified sequence from this ringBuffer.
- *
- * @param sequence to be removed.
- * @return <tt>true</tt> if this sequence was found, <tt>false</tt> otherwise.
- */
- public boolean removeGatingSequence(Sequence sequence)
- {
- return sequencer.removeGatingSequence(sequence);
- }
-
- /**
- * Create a new SequenceBarrier to be used by an EventProcessor to track which messages
- * are available to be read from the ring buffer given a list of sequences to track.
- *
- * @see SequenceBarrier
- * @param sequencesToTrack the additional sequences to track
- * @return A sequence barrier that will track the specified sequences.
- */
- public SequenceBarrier newBarrier(Sequence... sequencesToTrack)
- {
- return sequencer.newBarrier(sequencesToTrack);
- }
-
- /**
- * Get the current cursor value for the ring buffer. The cursor value is
- * the last value that was published, or the highest available sequence
- * that can be consumed.
- */
- public long getCursor()
- {
- return sequencer.getCursor();
- }
-
- /**
- * The size of the buffer.
- */
- public int getBufferSize()
- {
- return bufferSize;
- }
-
- /**
- * Given specified <tt>requiredCapacity</tt> determines if that amount of space
- * is available. Note, you can not assume that if this method returns <tt>true</tt>
- * that a call to {@link RingBuffer#next()} will not block. Especially true if this
- * ring buffer is set up to handle multiple producers.
- *
- * @param requiredCapacity The capacity to check for.
- * @return <tt>true</tt> if the specified <tt>requiredCapacity</tt> is available,
- * <tt>false</tt> if not.
- */
- public boolean hasAvailableCapacity(int requiredCapacity)
- {
- return sequencer.hasAvailableCapacity(requiredCapacity);
- }
-
-
- /**
- * Publishes an event to the ring buffer. It handles
- * claiming the next sequence, getting the current (uninitialised)
- * event from the ring buffer and publishing the claimed sequence
- * after translation.
- *
- * @param translator The user specified translation for the event
- */
- public void publishEvent(EventTranslator<E> translator)
- {
- final long sequence = sequencer.next();
- translateAndPublish(translator, sequence);
- }
-
- /**
- * Attempts to publish an event to the ring buffer. It handles
- * claiming the next sequence, getting the current (uninitialised)
- * event from the ring buffer and publishing the claimed sequence
- * after translation. Will return false if specified capacity
- * was not available.
- *
- * @param translator The user specified translation for the event
- * @return true if the value was published, false if there was insufficient
- * capacity.
- */
- public boolean tryPublishEvent(EventTranslator<E> translator)
- {
- try
- {
- final long sequence = sequencer.tryNext();
- translateAndPublish(translator, sequence);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows one user supplied argument.
- *
- * @see #publishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- */
- public <A> void publishEvent(EventTranslatorOneArg<E, A> translator, A arg0)
- {
- final long sequence = sequencer.next();
- translateAndPublish(translator, sequence, arg0);
- }
-
- /**
- * Allows one user supplied argument.
- *
- * @see #tryPublishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- */
- public <A> boolean tryPublishEvent(EventTranslatorOneArg<E, A> translator, A arg0)
- {
- try
- {
- final long sequence = sequencer.tryNext();
- translateAndPublish(translator, sequence, arg0);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
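The translator-based publish methods in this removed RingBuffer wrap the claim/translate/publish cycle: next() (or tryNext()) claims a sequence, the translator copies the caller's arguments into the preallocated event for that slot, and the sequence is published in a finally block so a failing translator cannot wedge the buffer. A minimal sketch against the removed backtype.storm.utils.disruptor.RingBuffer API using the one-argument variant, with a hypothetical ValueEvent type introduced only for illustration:

    import com.lmax.disruptor.EventFactory;
    import com.lmax.disruptor.EventTranslatorOneArg;
    import backtype.storm.utils.disruptor.RingBuffer;

    public class TranslatorPublishDemo {
        // Hypothetical mutable event stored in each ring buffer slot.
        static class ValueEvent { String value; }

        public static void main(String[] args) {
            RingBuffer<ValueEvent> ringBuffer = RingBuffer.createMultiProducer(
                    new EventFactory<ValueEvent>() {
                        public ValueEvent newInstance() { return new ValueEvent(); }
                    }, 1024);

            EventTranslatorOneArg<ValueEvent, String> translator =
                    new EventTranslatorOneArg<ValueEvent, String>() {
                        public void translateTo(ValueEvent event, long sequence, String arg0) {
                            event.value = arg0;   // fill the preallocated slot in place
                        }
                    };

            ringBuffer.publishEvent(translator, "hello");                  // waits until capacity is available
            boolean ok = ringBuffer.tryPublishEvent(translator, "world");  // returns false instead of waiting
            System.out.println(ok);
        }
    }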
-
- /**
- * Allows two user supplied arguments.
- *
- * @see #publishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- * @param arg1 A user supplied argument.
- */
- public <A, B> void publishEvent(EventTranslatorTwoArg<E, A, B> translator, A arg0, B arg1)
- {
- final long sequence = sequencer.next();
- translateAndPublish(translator, sequence, arg0, arg1);
- }
-
- /**
- * Allows two user supplied arguments.
- *
- * @see #tryPublishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- * @param arg1 A user supplied argument.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- */
- public <A, B> boolean tryPublishEvent(EventTranslatorTwoArg<E, A, B> translator, A arg0, B arg1)
- {
- try
- {
- final long sequence = sequencer.tryNext();
- translateAndPublish(translator, sequence, arg0, arg1);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows three user supplied arguments
- *
- * @see #publishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- * @param arg1 A user supplied argument.
- * @param arg2 A user supplied argument.
- */
- public <A, B, C> void publishEvent(EventTranslatorThreeArg<E, A, B, C> translator, A arg0, B arg1, C arg2)
- {
- final long sequence = sequencer.next();
- translateAndPublish(translator, sequence, arg0, arg1, arg2);
- }
-
- /**
- * Allows three user supplied arguments
- *
- * @see #publishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- * @param arg1 A user supplied argument.
- * @param arg2 A user supplied argument.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- */
- public <A, B, C> boolean tryPublishEvent(EventTranslatorThreeArg<E, A, B, C> translator, A arg0, B arg1, C arg2)
- {
- try
- {
- final long sequence = sequencer.tryNext();
- translateAndPublish(translator, sequence, arg0, arg1, arg2);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows a variable number of user supplied arguments
- *
- * @see #publishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param args User supplied arguments.
- */
- public void publishEvent(EventTranslatorVararg<E> translator, Object...args)
- {
- final long sequence = sequencer.next();
- translateAndPublish(translator, sequence, args);
- }
-
- /**
- * Allows a variable number of user supplied arguments
- *
- * @see #publishEvent(EventTranslator)
- * @param translator The user specified translation for the event
- * @param args User supplied arguments.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- */
- public boolean tryPublishEvent(EventTranslatorVararg<E> translator, Object...args)
- {
- try
- {
- final long sequence = sequencer.tryNext();
- translateAndPublish(translator, sequence, args);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
-
- /**
- * Publishes multiple events to the ring buffer. It handles
- * claiming the next sequence, getting the current (uninitialised)
- * event from the ring buffer and publishing the claimed sequence
- * after translation.
- *
- * @param translators The user specified translation for each event
- */
- public void publishEvents(EventTranslator<E>[] translators)
- {
- publishEvents(translators, 0, translators.length);
- }
-
- /**
- * Publishes multiple events to the ring buffer. It handles
- * claiming the next sequence, getting the current (uninitialised)
- * event from the ring buffer and publishing the claimed sequence
- * after translation.
- *
- * @param translators The user specified translation for each event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch
- */
- public void publishEvents(EventTranslator<E>[] translators, int batchStartsAt, int batchSize)
- {
- checkBounds(translators, batchStartsAt, batchSize);
- final long finalSequence = sequencer.next(batchSize);
- translateAndPublishBatch(translators, batchStartsAt, batchSize, finalSequence);
- }
-
- /**
- * Attempts to publish multiple events to the ring buffer. It handles
- * claiming the next sequence, getting the current (uninitialised)
- * event from the ring buffer and publishing the claimed sequence
- * after translation. Will return false if specified capacity
- * was not available.
- *
- * @param translators The user specified translation for the event
- * @return true if the value was published, false if there was insufficient
- * capacity.
- */
- public boolean tryPublishEvents(EventTranslator<E>[] translators)
- {
- return tryPublishEvents(translators, 0, translators.length);
- }
-
- /**
- * Attempts to publish multiple events to the ring buffer. It handles
- * claiming the next sequence, getting the current (uninitialised)
- * event from the ring buffer and publishing the claimed sequence
- * after translation. Will return false if specified capacity
- * was not available.
- *
- * @param translators The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch
- * @return true if all the values were published, false if there was insufficient
- * capacity.
- */
- public boolean tryPublishEvents(EventTranslator<E>[] translators, int batchStartsAt, int batchSize)
- {
- checkBounds(translators, batchStartsAt, batchSize);
- try
- {
- final long finalSequence = sequencer.tryNext(batchSize);
- translateAndPublishBatch(translators, batchStartsAt, batchSize, finalSequence);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows one user supplied argument per event.
- *
- * @param translator The user specified translation for the event
- * @param arg0 A user supplied argument.
- * @see #publishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public <A> void publishEvents(EventTranslatorOneArg<E, A> translator, A[] arg0)
- {
- publishEvents(translator, 0, arg0.length, arg0);
- }
-
- /**
- * Allows one user supplied argument per event.
- *
- * @param translator The user specified translation for each event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch
- * @param arg0 An array of user supplied arguments, one element per event.
- * @see #publishEvents(EventTranslator[])
- */
- public <A> void publishEvents(EventTranslatorOneArg<E, A> translator, int batchStartsAt, int batchSize, A[] arg0)
- {
- checkBounds(arg0, batchStartsAt, batchSize);
- final long finalSequence = sequencer.next(batchSize);
- translateAndPublishBatch(translator, arg0, batchStartsAt, batchSize, finalSequence);
- }
-
- /**
- * Allows one user supplied argument.
- *
- * @param translator The user specified translation for each event
- * @param arg0 An array of user supplied arguments, one element per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #tryPublishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public <A> boolean tryPublishEvents(EventTranslatorOneArg<E, A> translator, A[] arg0)
- {
- return tryPublishEvents(translator, 0, arg0.length, arg0);
- }
-
- /**
- * Allows one user supplied argument.
- *
- * @param translator The user specified translation for each event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch
- * @param arg0 An array of user supplied arguments, one element per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #tryPublishEvents(EventTranslator[])
- */
- public <A> boolean tryPublishEvents(EventTranslatorOneArg<E, A> translator, int batchStartsAt, int batchSize, A[] arg0)
- {
- checkBounds(arg0, batchStartsAt, batchSize);
- try
- {
- final long finalSequence = sequencer.tryNext(batchSize);
- translateAndPublishBatch(translator, arg0, batchStartsAt, batchSize, finalSequence);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows two user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @see #publishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public <A, B> void publishEvents(EventTranslatorTwoArg<E, A, B> translator, A[] arg0, B[] arg1)
- {
- publishEvents(translator, 0, arg0.length, arg0, arg1);
- }
-
- /**
- * Allows two user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch.
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @see #publishEvents(EventTranslator[])
- */
- public <A, B> void publishEvents(EventTranslatorTwoArg<E, A, B> translator, int batchStartsAt, int batchSize, A[] arg0, B[] arg1)
- {
- checkBounds(arg0, arg1, batchStartsAt, batchSize);
- final long finalSequence = sequencer.next(batchSize);
- translateAndPublishBatch(translator, arg0, arg1, batchStartsAt, batchSize, finalSequence);
- }
-
- /**
- * Allows two user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #tryPublishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public <A, B> boolean tryPublishEvents(EventTranslatorTwoArg<E, A, B> translator, A[] arg0, B[] arg1)
- {
- return tryPublishEvents(translator, 0, arg0.length, arg0, arg1);
- }
-
- /**
- * Allows two user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch.
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #tryPublishEvents(EventTranslator[])
- */
- public <A, B> boolean tryPublishEvents(EventTranslatorTwoArg<E, A, B> translator, int batchStartsAt, int batchSize, A[] arg0, B[] arg1)
- {
- checkBounds(arg0, arg1, batchStartsAt, batchSize);
- try
- {
- final long finalSequence = sequencer.tryNext(batchSize);
- translateAndPublishBatch(translator, arg0, arg1, batchStartsAt, batchSize, finalSequence);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows three user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @param arg2 An array of user supplied arguments, one element per event.
- * @see #publishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public <A, B, C> void publishEvents(EventTranslatorThreeArg<E, A, B, C> translator, A[] arg0, B[] arg1, C[] arg2)
- {
- publishEvents(translator, 0, arg0.length, arg0, arg1, arg2);
- }
-
- /**
- * Allows three user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The number of elements in the batch.
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @param arg2 An array of user supplied arguments, one element per event.
- * @see #publishEvents(EventTranslator[])
- */
- public <A, B, C> void publishEvents(EventTranslatorThreeArg<E, A, B, C> translator, int batchStartsAt, int batchSize, A[] arg0, B[] arg1, C[] arg2)
- {
- checkBounds(arg0, arg1, arg2, batchStartsAt, batchSize);
- final long finalSequence = sequencer.next(batchSize);
- translateAndPublishBatch(translator, arg0, arg1, arg2, batchStartsAt, batchSize, finalSequence);
- }
-
- /**
- * Allows three user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @param arg2 An array of user supplied arguments, one element per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #publishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public <A, B, C> boolean tryPublishEvents(EventTranslatorThreeArg<E, A, B, C> translator, A[] arg0, B[] arg1, C[] arg2)
- {
- return tryPublishEvents(translator, 0, arg0.length, arg0, arg1, arg2);
- }
-
- /**
- * Allows three user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch.
- * @param arg0 An array of user supplied arguments, one element per event.
- * @param arg1 An array of user supplied arguments, one element per event.
- * @param arg2 An array of user supplied arguments, one element per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #publishEvents(EventTranslator[])
- */
- public <A, B, C> boolean tryPublishEvents(EventTranslatorThreeArg<E, A, B, C> translator, int batchStartsAt, int batchSize, A[] arg0, B[] arg1, C[] arg2)
- {
- checkBounds(arg0, arg1, arg2, batchStartsAt, batchSize);
- try
- {
- final long finalSequence = sequencer.tryNext(batchSize);
- translateAndPublishBatch(translator, arg0, arg1, arg2, batchStartsAt, batchSize, finalSequence);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Allows a variable number of user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param args User supplied arguments, one Object[] per event.
- * @see #publishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public void publishEvents(EventTranslatorVararg<E> translator, Object[]... args)
- {
- publishEvents(translator, 0, args.length, args);
- }
-
- /**
- * Allows a variable number of user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch
- * @param args User supplied arguments, one Object[] per event.
- * @see #publishEvents(EventTranslator[])
- */
- public void publishEvents(EventTranslatorVararg<E> translator, int batchStartsAt, int batchSize, Object[]... args)
- {
- checkBounds(batchStartsAt, batchSize, args);
- final long finalSequence = sequencer.next(batchSize);
- translateAndPublishBatch(translator, batchStartsAt, batchSize, finalSequence, args);
- }
-
- /**
- * Allows a variable number of user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param args User supplied arguments, one Object[] per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #publishEvents(com.lmax.disruptor.EventTranslator[])
- */
- public boolean tryPublishEvents(EventTranslatorVararg<E> translator, Object[]... args)
- {
- return tryPublishEvents(translator, 0, args.length, args);
- }
-
- /**
- * Allows a variable number of user supplied arguments per event.
- *
- * @param translator The user specified translation for the event
- * @param batchStartsAt The first element of the array which is within the batch.
- * @param batchSize The actual size of the batch.
- * @param args User supplied arguments, one Object[] per event.
- * @return true if the value was published, false if there was insufficient
- * capacity.
- * @see #publishEvents(EventTranslator[])
- */
- public boolean tryPublishEvents(EventTranslatorVararg<E> translator, int batchStartsAt, int batchSize, Object[]... args)
- {
- checkBounds(args, batchStartsAt, batchSize);
- try
- {
- final long finalSequence = sequencer.tryNext(batchSize);
- translateAndPublishBatch(translator, batchStartsAt, batchSize, finalSequence, args);
- return true;
- }
- catch (InsufficientCapacityException e)
- {
- return false;
- }
- }
-
- /**
- * Publish the specified sequence. This action marks this particular
- * message as being available to be read.
- *
- * @param sequence the sequence to publish.
- */
- public void publish(long sequence)
- {
- sequencer.publish(sequence);
- }
-
- /**
- * Publish the specified sequences. This action marks these particular
- * messages as being available to be read.
- *
- * @see Sequencer#next(int)
- * @param lo the lowest sequence number to be published
- * @param hi the highest sequence number to be published
- */
- public void publish(long lo, long hi)
- {
- sequencer.publish(lo, hi);
- }
-
- /**
- * Get the remaining capacity for this ringBuffer.
- * @return The number of slots remaining.
- */
- public long remainingCapacity()
- {
- return sequencer.remainingCapacity();
- }
-
- private void checkBounds(final EventTranslator<E>[] translators, final int batchStartsAt, final int batchSize)
- {
- checkBatchSizing(batchStartsAt, batchSize);
- batchOverRuns(translators, batchStartsAt, batchSize);
- }
-
- private void checkBatchSizing(int batchStartsAt, int batchSize)
- {
- if(batchStartsAt < 0 || batchSize < 0)
- {
- throw new IllegalArgumentException("Both batchStartsAt and batchSize must be positive but got: batchStartsAt " + batchStartsAt + " and batchSize " + batchSize);
- }
- else if(batchSize > bufferSize)
- {
- throw new IllegalArgumentException("The ring buffer cannot accommodate " + batchSize + " it only has space for " + bufferSize + " entities.");
- }
- }
-
- private <A> void checkBounds(final A[] arg0, final int batchStartsAt, final int batchSize)
- {
- checkBatchSizing(batchStartsAt, batchSize);
- batchOverRuns(arg0, batchStartsAt, batchSize);
- }
-
- private <A, B> void checkBounds(final A[] arg0, final B[] arg1, final int batchStartsAt, final int batchSize)
- {
- checkBatchSizing(batchStartsAt, batchSize);
- batchOverRuns(arg0, batchStartsAt, batchSize);
- batchOverRuns(arg1, batchStartsAt, batchSize);
- }
-
- private <A, B, C> void checkBounds(final A[] arg0, final B[] arg1, final C[] arg2, final int batchStartsAt, final int batchSize)
- {
- checkBatchSizing(batchStartsAt, batchSize);
- batchOverRuns(arg0, batchStartsAt, batchSize);
- batchOverRuns(arg1, batchStartsAt, batchSize);
- batchOverRuns(arg2, batchStartsAt, batchSize);
- }
-
- private void checkBounds(final int batchStartsAt, final int batchSize, final Object[][] args)
- {
- checkBatchSizing(batchStartsAt, batchSize);
- batchOverRuns(args, batchStartsAt, batchSize);
- }
-
- private <A> void batchOverRuns(final A[] arg0, final int batchStartsAt, final int batchSize)
- {
- if(batchStartsAt + batchSize > arg0.length)
- {
- throw new IllegalArgumentException("A batchSize of: " + batchSize +
- " with batchStatsAt of: " + batchStartsAt +
- " will overrun the available number of arguments: " + (arg0.length - batchStartsAt));
- }
- }
-
- private void translateAndPublish(EventTranslator<E> translator, long sequence)
- {
- try
- {
- translator.translateTo(get(sequence), sequence);
- }
- finally
- {
- sequencer.publish(sequence);
- }
- }
-
- private <A> void translateAndPublish(EventTranslatorOneArg<E, A> translator, long sequence, A arg0)
- {
- try
- {
- translator.translateTo(get(sequence), sequence, arg0);
- }
- finally
- {
- sequencer.publish(sequence);
- }
- }
-
- private <A, B> void translateAndPublish(EventTranslatorTwoArg<E, A, B> translator, long sequence, A arg0, B arg1)
- {
- try
- {
- translator.translateTo(get(sequence), sequence, arg0, arg1);
- }
- finally
- {
- sequencer.publish(sequence);
- }
- }
-
- private <A, B, C> void translateAndPublish(EventTranslatorThreeArg<E, A, B, C> translator, long sequence,
- A arg0, B arg1, C arg2)
- {
- try
- {
- translator.translateTo(get(sequence), sequence, arg0, arg1, arg2);
- }
- finally
- {
- sequencer.publish(sequence);
- }
- }
-
- private void translateAndPublish(EventTranslatorVararg<E> translator, long sequence, Object...args)
- {
- try
- {
- translator.translateTo(get(sequence), sequence, args);
- }
- finally
- {
- sequencer.publish(sequence);
- }
- }
-
- private void translateAndPublishBatch(final EventTranslator<E>[] translators, int batchStartsAt,
- final int batchSize, final long finalSequence)
- {
- final long initialSequence = finalSequence - (batchSize - 1);
- try
- {
- long sequence = initialSequence;
- final int batchEndsAt = batchStartsAt + batchSize;
- for (int i = batchStartsAt; i < batchEndsAt; i++)
- {
- final EventTranslator<E> translator = translators[i];
- translator.translateTo(get(sequence), sequence++);
- }
- }
- finally
- {
- sequencer.publish(initialSequence, finalSequence);
- }
- }
-
- private <A> void translateAndPublishBatch(final EventTranslatorOneArg<E, A> translator, final A[] arg0,
- int batchStartsAt, final int batchSize, final long finalSequence)
- {
- final long initialSequence = finalSequence - (batchSize - 1);
- try
- {
- long sequence = initialSequence;
- final int batchEndsAt = batchStartsAt + batchSize;
- for (int i = batchStartsAt; i < batchEndsAt; i++)
- {
- translator.translateTo(get(sequence), sequence++, arg0[i]);
- }
- }
- finally
- {
- sequencer.publish(initialSequence, finalSequence);
- }
- }
-
- private <A, B> void translateAndPublishBatch(final EventTranslatorTwoArg<E, A, B> translator, final A[] arg0,
- final B[] arg1, int batchStartsAt, int batchSize,
- final long finalSequence)
- {
- final long initialSequence = finalSequence - (batchSize - 1);
- try
- {
- long sequence = initialSequence;
- final int batchEndsAt = batchStartsAt + batchSize;
- for (int i = batchStartsAt; i < batchEndsAt; i++)
- {
- translator.translateTo(get(sequence), sequence++, arg0[i], arg1[i]);
- }
- }
- finally
- {
- sequencer.publish(initialSequence, finalSequence);
- }
- }
-
- private <A, B, C> void translateAndPublishBatch(final EventTranslatorThreeArg<E, A, B, C> translator,
- final A[] arg0, final B[] arg1, final C[] arg2, int batchStartsAt,
- final int batchSize, final long finalSequence)
- {
- final long initialSequence = finalSequence - (batchSize - 1);
- try
- {
- long sequence = initialSequence;
- final int batchEndsAt = batchStartsAt + batchSize;
- for (int i = batchStartsAt; i < batchEndsAt; i++)
- {
- translator.translateTo(get(sequence), sequence++, arg0[i], arg1[i], arg2[i]);
- }
- }
- finally
- {
- sequencer.publish(initialSequence, finalSequence);
- }
- }
-
- private void translateAndPublishBatch(final EventTranslatorVararg<E> translator, int batchStartsAt,
- final int batchSize, final long finalSequence, final Object[][] args)
- {
- final long initialSequence = finalSequence - (batchSize - 1);
- try
- {
- long sequence = initialSequence;
- final int batchEndsAt = batchStartsAt + batchSize;
- for (int i = batchStartsAt; i < batchEndsAt; i++)
- {
- translator.translateTo(get(sequence), sequence++, args[i]);
- }
- }
- finally
- {
- sequencer.publish(initialSequence, finalSequence);
- }
- }
-
- private void fill(EventFactory<E> eventFactory)
- {
- for (int i = 0; i < entries.length; i++)
- {
- entries[i] = eventFactory.newInstance();
- }
- }
-}
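
A note on the RingBuffer code removed above: every batch variant of publishEvents/tryPublishEvents follows the same claim-translate-publish pattern — claim a contiguous block of sequences from the sequencer, run the translator over each slot, then publish the whole range in a finally block so a failing translator can never leave claimed-but-unpublished slots stalling consumers. The following is a minimal, self-contained Java sketch of that pattern only; MiniRing, Translator and the Supplier-based pre-fill are illustrative stand-ins, not the Disruptor API.

    import java.util.concurrent.atomic.AtomicLong;
    import java.util.function.Supplier;

    // Illustrative stand-in for EventTranslator (not the real Disruptor interface).
    interface Translator<E> {
        void translateTo(E event, long sequence);
    }

    final class MiniRing<E> {
        private final E[] entries;
        private final AtomicLong cursor = new AtomicLong(-1); // highest published sequence
        private long nextValue = -1;                          // highest claimed sequence (single producer)

        @SuppressWarnings("unchecked")
        MiniRing(int size, Supplier<E> factory) {
            entries = (E[]) new Object[size];
            for (int i = 0; i < size; i++) {
                entries[i] = factory.get();                   // pre-fill slots, like fill(EventFactory)
            }
        }

        private E get(long sequence) {
            return entries[(int) (sequence % entries.length)];
        }

        /** Claim batchSize sequences, translate each slot, publish the whole range in finally. */
        void publishBatch(Translator<E> translator, int batchSize) {
            long finalSequence = (nextValue += batchSize);    // claim, like sequencer.next(batchSize)
            long initialSequence = finalSequence - (batchSize - 1);
            try {
                for (long seq = initialSequence; seq <= finalSequence; seq++) {
                    translator.translateTo(get(seq), seq);
                }
            } finally {
                cursor.set(finalSequence);                    // publish lo..hi: consumers may now read
            }
        }
    }

Publishing inside finally is the important design choice: if a translator throws, the claimed range is still released to consumers (with whatever contents the slots held) rather than leaving a permanent gap in the sequence.
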
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/disruptor/SingleProducerSequencer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/SingleProducerSequencer.java b/jstorm-client/src/main/java/backtype/storm/utils/disruptor/SingleProducerSequencer.java
deleted file mode 100644
index 422a6d4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/SingleProducerSequencer.java
+++ /dev/null
@@ -1,199 +0,0 @@
-package backtype.storm.utils.disruptor;
-
-import java.util.concurrent.locks.LockSupport;
-
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.Sequence;
-import com.lmax.disruptor.Sequencer;
-import com.lmax.disruptor.WaitStrategy;
-import com.lmax.disruptor.util.Util;
-
-
-/**
- * <p>Coordinator for claiming sequences for access to a data structure while tracking dependent {@link Sequence}s.<p>
- *
- * <p>Generally not safe for use from multiple threads as it does not implement any barriers.</p>
- */
-public class SingleProducerSequencer extends AbstractSequencerExt
-{
- @SuppressWarnings("unused")
- private static class Padding
- {
- /** Set to -1 as sequence starting point */
- public long nextValue = -1L, cachedValue = -1L, p2, p3, p4, p5, p6, p7;
- }
-
- private final Padding pad = new Padding();
-
- /**
- * Construct a Sequencer with the selected wait strategy and buffer size.
- *
- * @param bufferSize the size of the buffer that this will sequence over.
- * @param waitStrategy for those waiting on sequences.
- */
- public SingleProducerSequencer(int bufferSize, final WaitStrategy waitStrategy)
- {
- super(bufferSize, waitStrategy);
- }
-
- /**
- * @see Sequencer#hasAvailableCapacity(int)
- */
- @Override
- public boolean hasAvailableCapacity(final int requiredCapacity)
- {
- long nextValue = pad.nextValue;
-
- long wrapPoint = (nextValue + requiredCapacity) - bufferSize;
- long cachedGatingSequence = pad.cachedValue;
-
- if (wrapPoint > cachedGatingSequence || cachedGatingSequence > nextValue)
- {
- long minSequence = Util.getMinimumSequence(gatingSequences, nextValue);
- pad.cachedValue = minSequence;
-
- if (wrapPoint > minSequence)
- {
- return false;
- }
- }
-
- return true;
- }
-
- /**
- * @see Sequencer#next()
- */
- @Override
- public long next()
- {
- return next(1);
- }
-
- /**
- * @see Sequencer#next(int)
- */
- @Override
- public long next(int n)
- {
- if (n < 1)
- {
- throw new IllegalArgumentException("n must be > 0");
- }
-
- long nextValue = pad.nextValue;
-
- long nextSequence = nextValue + n;
- long wrapPoint = nextSequence - bufferSize;
- long cachedGatingSequence = pad.cachedValue;
-
- if (wrapPoint > cachedGatingSequence || cachedGatingSequence > nextValue)
- {
- long minSequence;
- while (wrapPoint > (minSequence = Util.getMinimumSequence(gatingSequences, nextValue)))
- {
- if (AbstractSequencerExt.isWaitSleep()) {
- try {
- Thread.sleep(1);
- } catch (InterruptedException e) {
- }
- } else {
- LockSupport.parkNanos(1);
- }
- }
-
- pad.cachedValue = minSequence;
- }
-
- pad.nextValue = nextSequence;
-
- return nextSequence;
- }
-
- /**
- * @see Sequencer#tryNext()
- */
- @Override
- public long tryNext() throws InsufficientCapacityException
- {
- return tryNext(1);
- }
-
- /**
- * @see Sequencer#tryNext(int)
- */
- @Override
- public long tryNext(int n) throws InsufficientCapacityException
- {
- if (n < 1)
- {
- throw new IllegalArgumentException("n must be > 0");
- }
-
- if (!hasAvailableCapacity(n))
- {
- throw InsufficientCapacityException.INSTANCE;
- }
-
- long nextSequence = pad.nextValue += n;
-
- return nextSequence;
- }
-
- /**
- * @see Sequencer#remainingCapacity()
- */
- @Override
- public long remainingCapacity()
- {
- long nextValue = pad.nextValue;
-
- long consumed = Util.getMinimumSequence(gatingSequences, nextValue);
- long produced = nextValue;
- return getBufferSize() - (produced - consumed);
- }
-
- /**
- * @see Sequencer#claim(long)
- */
- @Override
- public void claim(long sequence)
- {
- pad.nextValue = sequence;
- }
-
- /**
- * @see Sequencer#publish(long)
- */
- @Override
- public void publish(long sequence)
- {
- cursor.set(sequence);
- waitStrategy.signalAllWhenBlocking();
- }
-
- /**
- * @see Sequencer#publish(long, long)
- */
- @Override
- public void publish(long lo, long hi)
- {
- publish(hi);
- }
-
- /**
- * @see Sequencer#isAvailable(long)
- */
- @Override
- public boolean isAvailable(long sequence)
- {
- return sequence <= cursor.get();
- }
-
- @Override
- public long getHighestPublishedSequence(long lowerBound, long availableSequence)
- {
- return availableSequence;
- }
-}
-
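
The heart of the deleted SingleProducerSequencer is the wrap-point test in next(int) and hasAvailableCapacity(int): claiming n slots would lap the ring if (nextValue + n) - bufferSize moves past the slowest gating (consumer) sequence, so the producer must wait until that consumer advances. Below is a small self-contained sketch of just that arithmetic; WrapPointDemo and its example values are illustrative and do not depend on the Disruptor classes.

    // Minimal sketch of the wrap-point check used by SingleProducerSequencer above.
    final class WrapPointDemo {
        static boolean hasAvailableCapacity(long nextValue, int required, int bufferSize, long[] gatingSequences) {
            long wrapPoint = (nextValue + required) - bufferSize;
            long minGating = Long.MAX_VALUE;
            for (long s : gatingSequences) {
                minGating = Math.min(minGating, s);   // slowest consumer's progress
            }
            // The claim is safe only if it does not overwrite a slot the slowest consumer has not read.
            return wrapPoint <= minGating;
        }

        public static void main(String[] args) {
            int bufferSize = 8;
            long produced = 9;                        // producer has claimed sequences 0..9
            long[] consumers = {3, 5};                // consumers have processed up to 3 and 5
            // Claiming 2 more reaches sequence 11; wrapPoint = 11 - 8 = 3 <= 3, still safe.
            System.out.println(hasAvailableCapacity(produced, 2, bufferSize, consumers)); // true
            // Claiming 3 more gives wrapPoint = 4 > 3: it would overwrite an unread slot, so next(3) must wait.
            System.out.println(hasAvailableCapacity(produced, 3, bufferSize, consumers)); // false
        }
    }
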
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/JoinType.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/JoinType.java b/jstorm-client/src/main/java/storm/trident/JoinType.java
deleted file mode 100644
index 30169d4..0000000
--- a/jstorm-client/src/main/java/storm/trident/JoinType.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package storm.trident;
-
-import java.util.Arrays;
-import java.util.List;
-
-public enum JoinType {
- INNER,
- OUTER;
-
- public static List<JoinType> mixed(JoinType... types) {
- return Arrays.asList(types);
- }
-}
\ No newline at end of file
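
JoinType.mixed above is only a convenience for building the per-stream list of join types that Trident's multi-stream join accepts. A short usage sketch, assuming the usual TridentTopology.join overload that takes a List<JoinType>; the streams and field names are placeholders:

    import java.util.List;

    import backtype.storm.tuple.Fields;
    import storm.trident.JoinType;
    import storm.trident.Stream;
    import storm.trident.TridentTopology;

    public class MixedJoinSketch {
        // s1 and s2 are assumed to be already-built Trident streams that share a "key" field.
        static Stream joinInnerOuter(TridentTopology topology, Stream s1, Stream s2) {
            List<JoinType> types = JoinType.mixed(JoinType.INNER, JoinType.OUTER); // inner on s1, outer on s2
            return topology.join(s1, new Fields("key"), s2, new Fields("key"),
                                 new Fields("key", "val1", "val2"), types);
        }
    }
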
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/Stream.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/Stream.java b/jstorm-client/src/main/java/storm/trident/Stream.java
deleted file mode 100644
index 629cad0..0000000
--- a/jstorm-client/src/main/java/storm/trident/Stream.java
+++ /dev/null
@@ -1,360 +0,0 @@
-package storm.trident;
-
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.NullStruct;
-import storm.trident.fluent.ChainedAggregatorDeclarer;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import storm.trident.fluent.GlobalAggregationScheme;
-import storm.trident.fluent.GroupedStream;
-import storm.trident.fluent.IAggregatableStream;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.Assembly;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.Filter;
-import storm.trident.operation.Function;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.operation.impl.CombinerAggStateUpdater;
-import storm.trident.operation.impl.FilterExecutor;
-import storm.trident.operation.impl.GlobalBatchToPartition;
-import storm.trident.operation.impl.ReducerAggStateUpdater;
-import storm.trident.operation.impl.IndexHashBatchToPartition;
-import storm.trident.operation.impl.SingleEmitAggregator.BatchToPartition;
-import storm.trident.operation.impl.TrueFilter;
-import storm.trident.partition.GlobalGrouping;
-import storm.trident.partition.IdentityGrouping;
-import storm.trident.partition.IndexHashGrouping;
-import storm.trident.planner.Node;
-import storm.trident.planner.NodeStateInfo;
-import storm.trident.planner.PartitionNode;
-import storm.trident.planner.ProcessorNode;
-import storm.trident.planner.processor.AggregateProcessor;
-import storm.trident.planner.processor.EachProcessor;
-import storm.trident.planner.processor.PartitionPersistProcessor;
-import storm.trident.planner.processor.ProjectedProcessor;
-import storm.trident.planner.processor.StateQueryProcessor;
-import storm.trident.state.QueryFunction;
-import storm.trident.state.StateFactory;
-import storm.trident.state.StateSpec;
-import storm.trident.state.StateUpdater;
-import storm.trident.util.TridentUtils;
-
-// TODO: need to be able to replace existing fields with the function fields (like Cascading Fields.REPLACE)
-public class Stream implements IAggregatableStream {
- Node _node;
- TridentTopology _topology;
- String _name;
-
- protected Stream(TridentTopology topology, String name, Node node) {
- _topology = topology;
- _node = node;
- _name = name;
- }
-
- public Stream name(String name) {
- return new Stream(_topology, name, _node);
- }
-
- public Stream parallelismHint(int hint) {
- _node.parallelismHint = hint;
- return this;
- }
-
- public Stream project(Fields keepFields) {
- projectionValidation(keepFields);
- return _topology.addSourcedNode(this, new ProcessorNode(_topology.getUniqueStreamId(), _name, keepFields, new Fields(), new ProjectedProcessor(keepFields)));
- }
-
- public GroupedStream groupBy(Fields fields) {
- projectionValidation(fields);
- return new GroupedStream(this, fields);
- }
-
- public Stream partitionBy(Fields fields) {
- projectionValidation(fields);
- return partition(Grouping.fields(fields.toList()));
- }
-
- public Stream partition(CustomStreamGrouping partitioner) {
- return partition(Grouping.custom_serialized(Utils.serialize(partitioner)));
- }
-
- public Stream shuffle() {
- return partition(Grouping.shuffle(new NullStruct()));
- }
-
- public Stream localOrShuffle() {
- return partition(Grouping.local_or_shuffle(new NullStruct()));
- }
- public Stream global() {
- // use this instead of storm's built in one so that we can specify a singleemitbatchtopartition
- // without knowledge of storm's internals
- return partition(new GlobalGrouping());
- }
-
- public Stream batchGlobal() {
- // the first field is the batch id
- return partition(new IndexHashGrouping(0));
- }
-
- public Stream broadcast() {
- return partition(Grouping.all(new NullStruct()));
- }
-
- public Stream identityPartition() {
- return partition(new IdentityGrouping());
- }
-
- public Stream partition(Grouping grouping) {
- if(_node instanceof PartitionNode) {
- return each(new Fields(), new TrueFilter()).partition(grouping);
- } else {
- return _topology.addSourcedNode(this, new PartitionNode(_node.streamId, _name, getOutputFields(), grouping));
- }
- }
-
- public Stream applyAssembly(Assembly assembly) {
- return assembly.apply(this);
- }
-
- @Override
- public Stream each(Fields inputFields, Function function, Fields functionFields) {
- projectionValidation(inputFields);
- return _topology.addSourcedNode(this,
- new ProcessorNode(_topology.getUniqueStreamId(),
- _name,
- TridentUtils.fieldsConcat(getOutputFields(), functionFields),
- functionFields,
- new EachProcessor(inputFields, function)));
- }
-
- //creates brand new tuples with brand new fields
- @Override
- public Stream partitionAggregate(Fields inputFields, Aggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return _topology.addSourcedNode(this,
- new ProcessorNode(_topology.getUniqueStreamId(),
- _name,
- functionFields,
- functionFields,
- new AggregateProcessor(inputFields, agg)));
- }
-
- public Stream stateQuery(TridentState state, Fields inputFields, QueryFunction function, Fields functionFields) {
- projectionValidation(inputFields);
- String stateId = state._node.stateInfo.id;
- Node n = new ProcessorNode(_topology.getUniqueStreamId(),
- _name,
- TridentUtils.fieldsConcat(getOutputFields(), functionFields),
- functionFields,
- new StateQueryProcessor(stateId, inputFields, function));
- _topology._colocate.get(stateId).add(n);
- return _topology.addSourcedNode(this, n);
- }
-
- public TridentState partitionPersist(StateFactory stateFactory, Fields inputFields, StateUpdater updater, Fields functionFields) {
- return partitionPersist(new StateSpec(stateFactory), inputFields, updater, functionFields);
- }
-
- public TridentState partitionPersist(StateSpec stateSpec, Fields inputFields, StateUpdater updater, Fields functionFields) {
- projectionValidation(inputFields);
- String id = _topology.getUniqueStateId();
- ProcessorNode n = new ProcessorNode(_topology.getUniqueStreamId(),
- _name,
- functionFields,
- functionFields,
- new PartitionPersistProcessor(id, inputFields, updater));
- n.committer = true;
- n.stateInfo = new NodeStateInfo(id, stateSpec);
- return _topology.addSourcedStateNode(this, n);
- }
-
- public TridentState partitionPersist(StateFactory stateFactory, Fields inputFields, StateUpdater updater) {
- return partitionPersist(stateFactory, inputFields, updater, new Fields());
- }
-
- public TridentState partitionPersist(StateSpec stateSpec, Fields inputFields, StateUpdater updater) {
- return partitionPersist(stateSpec, inputFields, updater, new Fields());
- }
-
- public Stream each(Function function, Fields functionFields) {
- return each(null, function, functionFields);
- }
-
- public Stream each(Fields inputFields, Filter filter) {
- return each(inputFields, new FilterExecutor(filter), new Fields());
- }
-
- public ChainedAggregatorDeclarer chainedAgg() {
- return new ChainedAggregatorDeclarer(this, new BatchGlobalAggScheme());
- }
-
- public Stream partitionAggregate(Aggregator agg, Fields functionFields) {
- return partitionAggregate(null, agg, functionFields);
- }
-
- public Stream partitionAggregate(CombinerAggregator agg, Fields functionFields) {
- return partitionAggregate(null, agg, functionFields);
- }
-
- public Stream partitionAggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return chainedAgg()
- .partitionAggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public Stream partitionAggregate(ReducerAggregator agg, Fields functionFields) {
- return partitionAggregate(null, agg, functionFields);
- }
-
- public Stream partitionAggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return chainedAgg()
- .partitionAggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public Stream aggregate(Aggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public Stream aggregate(Fields inputFields, Aggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return chainedAgg()
- .aggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public Stream aggregate(CombinerAggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public Stream aggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return chainedAgg()
- .aggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public Stream aggregate(ReducerAggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public Stream aggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return chainedAgg()
- .aggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public TridentState partitionPersist(StateFactory stateFactory, StateUpdater updater, Fields functionFields) {
- return partitionPersist(new StateSpec(stateFactory), updater, functionFields);
- }
-
- public TridentState partitionPersist(StateSpec stateSpec, StateUpdater updater, Fields functionFields) {
- return partitionPersist(stateSpec, null, updater, functionFields);
- }
-
- public TridentState partitionPersist(StateFactory stateFactory, StateUpdater updater) {
- return partitionPersist(stateFactory, updater, new Fields());
- }
-
- public TridentState partitionPersist(StateSpec stateSpec, StateUpdater updater) {
- return partitionPersist(stateSpec, updater, new Fields());
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, CombinerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, CombinerAggregator agg, Fields functionFields) {
- return persistentAggregate(spec, null, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), inputFields, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- // replaces normal aggregation here with a global grouping because it needs to be consistent across batches
- return new ChainedAggregatorDeclarer(this, new GlobalAggScheme())
- .aggregate(inputFields, agg, functionFields)
- .chainEnd()
- .partitionPersist(spec, functionFields, new CombinerAggStateUpdater(agg), functionFields);
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, ReducerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, ReducerAggregator agg, Fields functionFields) {
- return persistentAggregate(spec, null, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), inputFields, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- projectionValidation(inputFields);
- return global().partitionPersist(spec, inputFields, new ReducerAggStateUpdater(agg), functionFields);
- }
-
- public Stream stateQuery(TridentState state, QueryFunction function, Fields functionFields) {
- return stateQuery(state, null, function, functionFields);
- }
-
- @Override
- public Stream toStream() {
- return this;
- }
-
- @Override
- public Fields getOutputFields() {
- return _node.allOutputFields;
- }
-
- static class BatchGlobalAggScheme implements GlobalAggregationScheme<Stream> {
-
- @Override
- public IAggregatableStream aggPartition(Stream s) {
- return s.batchGlobal();
- }
-
- @Override
- public BatchToPartition singleEmitPartitioner() {
- return new IndexHashBatchToPartition();
- }
-
- }
-
- static class GlobalAggScheme implements GlobalAggregationScheme<Stream> {
-
- @Override
- public IAggregatableStream aggPartition(Stream s) {
- return s.global();
- }
-
- @Override
- public BatchToPartition singleEmitPartitioner() {
- return new GlobalBatchToPartition();
- }
-
- }
-
- private void projectionValidation(Fields projFields) {
- if (projFields == null) {
- return;
- }
-
- Fields allFields = this.getOutputFields();
- for (String field : projFields) {
- if (!allFields.contains(field)) {
- throw new IllegalArgumentException("Trying to select non-existent field: '" + field + "' from stream containing fields fields: <" + allFields + ">");
- }
- }
- }
-}
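
The Stream class removed above is the fluent surface of Trident: each/partitionAggregate append processor nodes, groupBy/partitionBy insert partition nodes, and persistentAggregate rewrites the aggregation so it stays consistent across batches before handing off to partitionPersist. For orientation, the conventional word-count pipeline over this API looks roughly like the sketch below; FixedBatchSpout, Count and MemoryMapState are the standard Trident test/built-in helpers, while Split is a user-defined Function shown only for illustration.

    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentState;
    import storm.trident.TridentTopology;
    import storm.trident.operation.BaseFunction;
    import storm.trident.operation.TridentCollector;
    import storm.trident.operation.builtin.Count;
    import storm.trident.testing.FixedBatchSpout;
    import storm.trident.testing.MemoryMapState;
    import storm.trident.tuple.TridentTuple;

    public class WordCountSketch {
        /** Illustrative tokenizer: emits one "word" tuple per whitespace-separated token. */
        public static class Split extends BaseFunction {
            @Override
            public void execute(TridentTuple tuple, TridentCollector collector) {
                for (String word : tuple.getString(0).split("\\s+")) {
                    collector.emit(new Values(word));
                }
            }
        }

        static TridentState buildWordCountState(TridentTopology topology) {
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
                    new Values("the cow jumped over the moon"),
                    new Values("the man went to the store"));
            return topology.newStream("sentences", spout)
                    .each(new Fields("sentence"), new Split(), new Fields("word"))
                    .groupBy(new Fields("word"))
                    .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"))
                    .parallelismHint(4);
        }
    }
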
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/TridentState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/TridentState.java b/jstorm-client/src/main/java/storm/trident/TridentState.java
deleted file mode 100644
index c6771d5..0000000
--- a/jstorm-client/src/main/java/storm/trident/TridentState.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package storm.trident;
-
-import storm.trident.planner.Node;
-
-
-public class TridentState {
- TridentTopology _topology;
- Node _node;
-
- protected TridentState(TridentTopology topology, Node node) {
- _topology = topology;
- _node = node;
- }
-
- public Stream newValuesStream() {
- return new Stream(_topology, _node.name, _node);
- }
-
- public TridentState parallelismHint(int parallelism) {
- _node.parallelismHint = parallelism;
- return this;
- }
-}
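
TridentState above is the handle that partitionPersist/persistentAggregate return: downstream code either queries it with stateQuery or feeds its updates back into the dataflow via newValuesStream(). A brief sketch of the query side, continuing the word-count example and assuming the standard newDRPCStream and MapGet built-ins; all other names are illustrative.

    import backtype.storm.LocalDRPC;
    import backtype.storm.tuple.Fields;
    import storm.trident.Stream;
    import storm.trident.TridentState;
    import storm.trident.TridentTopology;
    import storm.trident.operation.builtin.MapGet;

    public class StateQuerySketch {
        // Wires a DRPC stream that looks up a single word's count from the state built earlier.
        // The DRPC argument (the "args" field) is assumed to be exactly one word.
        static Stream wireQuery(TridentTopology topology, LocalDRPC drpc, TridentState wordCounts) {
            return topology.newDRPCStream("count-of", drpc)
                    .stateQuery(wordCounts, new Fields("args"), new MapGet(), new Fields("count"));
        }
    }
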
[39/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java b/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java
deleted file mode 100644
index 86bec91..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java
+++ /dev/null
@@ -1,2015 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class DistributedRPCInvocations {
-
- public interface Iface {
-
- public void result(String id, String result) throws org.apache.thrift7.TException;
-
- public DRPCRequest fetchRequest(String functionName) throws org.apache.thrift7.TException;
-
- public void failRequest(String id) throws org.apache.thrift7.TException;
-
- }
-
- public interface AsyncIface {
-
- public void result(String id, String result, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.result_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void fetchRequest(String functionName, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.fetchRequest_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void failRequest(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.failRequest_call> resultHandler) throws org.apache.thrift7.TException;
-
- }
-
- public static class Client extends org.apache.thrift7.TServiceClient implements Iface {
- public static class Factory implements org.apache.thrift7.TServiceClientFactory<Client> {
- public Factory() {}
- public Client getClient(org.apache.thrift7.protocol.TProtocol prot) {
- return new Client(prot);
- }
- public Client getClient(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- return new Client(iprot, oprot);
- }
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol prot)
- {
- super(prot, prot);
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- super(iprot, oprot);
- }
-
- public void result(String id, String result) throws org.apache.thrift7.TException
- {
- send_result(id, result);
- recv_result();
- }
-
- public void send_result(String id, String result) throws org.apache.thrift7.TException
- {
- result_args args = new result_args();
- args.set_id(id);
- args.set_result(result);
- sendBase("result", args);
- }
-
- public void recv_result() throws org.apache.thrift7.TException
- {
- result_result result = new result_result();
- receiveBase(result, "result");
- return;
- }
-
- public DRPCRequest fetchRequest(String functionName) throws org.apache.thrift7.TException
- {
- send_fetchRequest(functionName);
- return recv_fetchRequest();
- }
-
- public void send_fetchRequest(String functionName) throws org.apache.thrift7.TException
- {
- fetchRequest_args args = new fetchRequest_args();
- args.set_functionName(functionName);
- sendBase("fetchRequest", args);
- }
-
- public DRPCRequest recv_fetchRequest() throws org.apache.thrift7.TException
- {
- fetchRequest_result result = new fetchRequest_result();
- receiveBase(result, "fetchRequest");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "fetchRequest failed: unknown result");
- }
-
- public void failRequest(String id) throws org.apache.thrift7.TException
- {
- send_failRequest(id);
- recv_failRequest();
- }
-
- public void send_failRequest(String id) throws org.apache.thrift7.TException
- {
- failRequest_args args = new failRequest_args();
- args.set_id(id);
- sendBase("failRequest", args);
- }
-
- public void recv_failRequest() throws org.apache.thrift7.TException
- {
- failRequest_result result = new failRequest_result();
- receiveBase(result, "failRequest");
- return;
- }
-
- }
- public static class AsyncClient extends org.apache.thrift7.async.TAsyncClient implements AsyncIface {
- public static class Factory implements org.apache.thrift7.async.TAsyncClientFactory<AsyncClient> {
- private org.apache.thrift7.async.TAsyncClientManager clientManager;
- private org.apache.thrift7.protocol.TProtocolFactory protocolFactory;
- public Factory(org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.protocol.TProtocolFactory protocolFactory) {
- this.clientManager = clientManager;
- this.protocolFactory = protocolFactory;
- }
- public AsyncClient getAsyncClient(org.apache.thrift7.transport.TNonblockingTransport transport) {
- return new AsyncClient(protocolFactory, clientManager, transport);
- }
- }
-
- public AsyncClient(org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.transport.TNonblockingTransport transport) {
- super(protocolFactory, clientManager, transport);
- }
-
- public void result(String id, String result, org.apache.thrift7.async.AsyncMethodCallback<result_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- result_call method_call = new result_call(id, result, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class result_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- private String result;
- public result_call(String id, String result, org.apache.thrift7.async.AsyncMethodCallback<result_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- this.result = result;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("result", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- result_args args = new result_args();
- args.set_id(id);
- args.set_result(result);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_result();
- }
- }
-
- public void fetchRequest(String functionName, org.apache.thrift7.async.AsyncMethodCallback<fetchRequest_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- fetchRequest_call method_call = new fetchRequest_call(functionName, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class fetchRequest_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String functionName;
- public fetchRequest_call(String functionName, org.apache.thrift7.async.AsyncMethodCallback<fetchRequest_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.functionName = functionName;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("fetchRequest", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- fetchRequest_args args = new fetchRequest_args();
- args.set_functionName(functionName);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public DRPCRequest getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_fetchRequest();
- }
- }
-
- public void failRequest(String id, org.apache.thrift7.async.AsyncMethodCallback<failRequest_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- failRequest_call method_call = new failRequest_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class failRequest_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public failRequest_call(String id, org.apache.thrift7.async.AsyncMethodCallback<failRequest_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("failRequest", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- failRequest_args args = new failRequest_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_failRequest();
- }
- }
-
- }
-
- public static class Processor<I extends Iface> extends org.apache.thrift7.TBaseProcessor implements org.apache.thrift7.TProcessor {
- private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
- public Processor(I iface) {
- super(iface, getProcessMap(new HashMap<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>>()));
- }
-
- protected Processor(I iface, Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- super(iface, getProcessMap(processMap));
- }
-
- private static <I extends Iface> Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> getProcessMap(Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- processMap.put("result", new result());
- processMap.put("fetchRequest", new fetchRequest());
- processMap.put("failRequest", new failRequest());
- return processMap;
- }
-
- private static class result<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, result_args> {
- public result() {
- super("result");
- }
-
- protected result_args getEmptyArgsInstance() {
- return new result_args();
- }
-
- protected result_result getResult(I iface, result_args args) throws org.apache.thrift7.TException {
- result_result result = new result_result();
- iface.result(args.id, args.result);
- return result;
- }
- }
-
- private static class fetchRequest<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, fetchRequest_args> {
- public fetchRequest() {
- super("fetchRequest");
- }
-
- protected fetchRequest_args getEmptyArgsInstance() {
- return new fetchRequest_args();
- }
-
- protected fetchRequest_result getResult(I iface, fetchRequest_args args) throws org.apache.thrift7.TException {
- fetchRequest_result result = new fetchRequest_result();
- result.success = iface.fetchRequest(args.functionName);
- return result;
- }
- }
-
- private static class failRequest<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, failRequest_args> {
- public failRequest() {
- super("failRequest");
- }
-
- protected failRequest_args getEmptyArgsInstance() {
- return new failRequest_args();
- }
-
- protected failRequest_result getResult(I iface, failRequest_args args) throws org.apache.thrift7.TException {
- failRequest_result result = new failRequest_result();
- iface.failRequest(args.id);
- return result;
- }
- }
-
- }
-
- public static class result_args implements org.apache.thrift7.TBase<result_args, result_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("result_args");
-
- private static final org.apache.thrift7.protocol.TField ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("id", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField RESULT_FIELD_DESC = new org.apache.thrift7.protocol.TField("result", org.apache.thrift7.protocol.TType.STRING, (short)2);
-
- private String id; // required
- private String result; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ID((short)1, "id"),
- RESULT((short)2, "result");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if it is not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ID
- return ID;
- case 2: // RESULT
- return RESULT;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if it is not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ID, new org.apache.thrift7.meta_data.FieldMetaData("id", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.RESULT, new org.apache.thrift7.meta_data.FieldMetaData("result", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(result_args.class, metaDataMap);
- }
-
- public result_args() {
- }
-
- public result_args(
- String id,
- String result)
- {
- this();
- this.id = id;
- this.result = result;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public result_args(result_args other) {
- if (other.is_set_id()) {
- this.id = other.id;
- }
- if (other.is_set_result()) {
- this.result = other.result;
- }
- }
-
- public result_args deepCopy() {
- return new result_args(this);
- }
-
- @Override
- public void clear() {
- this.id = null;
- this.result = null;
- }
-
- public String get_id() {
- return this.id;
- }
-
- public void set_id(String id) {
- this.id = id;
- }
-
- public void unset_id() {
- this.id = null;
- }
-
- /** Returns true if field id is set (has been assigned a value) and false otherwise */
- public boolean is_set_id() {
- return this.id != null;
- }
-
- public void set_id_isSet(boolean value) {
- if (!value) {
- this.id = null;
- }
- }
-
- public String get_result() {
- return this.result;
- }
-
- public void set_result(String result) {
- this.result = result;
- }
-
- public void unset_result() {
- this.result = null;
- }
-
- /** Returns true if field result is set (has been assigned a value) and false otherwise */
- public boolean is_set_result() {
- return this.result != null;
- }
-
- public void set_result_isSet(boolean value) {
- if (!value) {
- this.result = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ID:
- if (value == null) {
- unset_id();
- } else {
- set_id((String)value);
- }
- break;
-
- case RESULT:
- if (value == null) {
- unset_result();
- } else {
- set_result((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ID:
- return get_id();
-
- case RESULT:
- return get_result();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ID:
- return is_set_id();
- case RESULT:
- return is_set_result();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof result_args)
- return this.equals((result_args)that);
- return false;
- }
-
- public boolean equals(result_args that) {
- if (that == null)
- return false;
-
- boolean this_present_id = true && this.is_set_id();
- boolean that_present_id = true && that.is_set_id();
- if (this_present_id || that_present_id) {
- if (!(this_present_id && that_present_id))
- return false;
- if (!this.id.equals(that.id))
- return false;
- }
-
- boolean this_present_result = true && this.is_set_result();
- boolean that_present_result = true && that.is_set_result();
- if (this_present_result || that_present_result) {
- if (!(this_present_result && that_present_result))
- return false;
- if (!this.result.equals(that.result))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_id = true && (is_set_id());
- builder.append(present_id);
- if (present_id)
- builder.append(id);
-
- boolean present_result = true && (is_set_result());
- builder.append(present_result);
- if (present_result)
- builder.append(result);
-
- return builder.toHashCode();
- }
-
- public int compareTo(result_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- result_args typedOther = (result_args)other;
-
- lastComparison = Boolean.valueOf(is_set_id()).compareTo(typedOther.is_set_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.id, typedOther.id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_result()).compareTo(typedOther.is_set_result());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_result()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.result, typedOther.result);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // RESULT
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.result = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.id != null) {
- oprot.writeFieldBegin(ID_FIELD_DESC);
- oprot.writeString(this.id);
- oprot.writeFieldEnd();
- }
- if (this.result != null) {
- oprot.writeFieldBegin(RESULT_FIELD_DESC);
- oprot.writeString(this.result);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("result_args(");
- boolean first = true;
-
- sb.append("id:");
- if (this.id == null) {
- sb.append("null");
- } else {
- sb.append(this.id);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("result:");
- if (this.result == null) {
- sb.append("null");
- } else {
- sb.append(this.result);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class result_result implements org.apache.thrift7.TBase<result_result, result_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("result_result");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if it is not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if it is not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(result_result.class, metaDataMap);
- }
-
- public result_result() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public result_result(result_result other) {
- }
-
- public result_result deepCopy() {
- return new result_result(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof result_result)
- return this.equals((result_result)that);
- return false;
- }
-
- public boolean equals(result_result that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(result_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- result_result typedOther = (result_result)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("result_result(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class fetchRequest_args implements org.apache.thrift7.TBase<fetchRequest_args, fetchRequest_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("fetchRequest_args");
-
- private static final org.apache.thrift7.protocol.TField FUNCTION_NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("functionName", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String functionName; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- FUNCTION_NAME((short)1, "functionName");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if it is not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // FUNCTION_NAME
- return FUNCTION_NAME;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.FUNCTION_NAME, new org.apache.thrift7.meta_data.FieldMetaData("functionName", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(fetchRequest_args.class, metaDataMap);
- }
-
- public fetchRequest_args() {
- }
-
- public fetchRequest_args(
- String functionName)
- {
- this();
- this.functionName = functionName;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public fetchRequest_args(fetchRequest_args other) {
- if (other.is_set_functionName()) {
- this.functionName = other.functionName;
- }
- }
-
- public fetchRequest_args deepCopy() {
- return new fetchRequest_args(this);
- }
-
- @Override
- public void clear() {
- this.functionName = null;
- }
-
- public String get_functionName() {
- return this.functionName;
- }
-
- public void set_functionName(String functionName) {
- this.functionName = functionName;
- }
-
- public void unset_functionName() {
- this.functionName = null;
- }
-
- /** Returns true if field functionName is set (has been assigned a value) and false otherwise */
- public boolean is_set_functionName() {
- return this.functionName != null;
- }
-
- public void set_functionName_isSet(boolean value) {
- if (!value) {
- this.functionName = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case FUNCTION_NAME:
- if (value == null) {
- unset_functionName();
- } else {
- set_functionName((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case FUNCTION_NAME:
- return get_functionName();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case FUNCTION_NAME:
- return is_set_functionName();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof fetchRequest_args)
- return this.equals((fetchRequest_args)that);
- return false;
- }
-
- public boolean equals(fetchRequest_args that) {
- if (that == null)
- return false;
-
- boolean this_present_functionName = true && this.is_set_functionName();
- boolean that_present_functionName = true && that.is_set_functionName();
- if (this_present_functionName || that_present_functionName) {
- if (!(this_present_functionName && that_present_functionName))
- return false;
- if (!this.functionName.equals(that.functionName))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_functionName = true && (is_set_functionName());
- builder.append(present_functionName);
- if (present_functionName)
- builder.append(functionName);
-
- return builder.toHashCode();
- }
-
- public int compareTo(fetchRequest_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- fetchRequest_args typedOther = (fetchRequest_args)other;
-
- lastComparison = Boolean.valueOf(is_set_functionName()).compareTo(typedOther.is_set_functionName());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_functionName()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.functionName, typedOther.functionName);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // FUNCTION_NAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.functionName = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.functionName != null) {
- oprot.writeFieldBegin(FUNCTION_NAME_FIELD_DESC);
- oprot.writeString(this.functionName);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("fetchRequest_args(");
- boolean first = true;
-
- sb.append("functionName:");
- if (this.functionName == null) {
- sb.append("null");
- } else {
- sb.append(this.functionName);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class fetchRequest_result implements org.apache.thrift7.TBase<fetchRequest_result, fetchRequest_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("fetchRequest_result");
-
- private static final org.apache.thrift7.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift7.protocol.TField("success", org.apache.thrift7.protocol.TType.STRUCT, (short)0);
-
- private DRPCRequest success; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SUCCESS((short)0, "success");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 0: // SUCCESS
- return SUCCESS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SUCCESS, new org.apache.thrift7.meta_data.FieldMetaData("success", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, DRPCRequest.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(fetchRequest_result.class, metaDataMap);
- }
-
- public fetchRequest_result() {
- }
-
- public fetchRequest_result(
- DRPCRequest success)
- {
- this();
- this.success = success;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public fetchRequest_result(fetchRequest_result other) {
- if (other.is_set_success()) {
- this.success = new DRPCRequest(other.success);
- }
- }
-
- public fetchRequest_result deepCopy() {
- return new fetchRequest_result(this);
- }
-
- @Override
- public void clear() {
- this.success = null;
- }
-
- public DRPCRequest get_success() {
- return this.success;
- }
-
- public void set_success(DRPCRequest success) {
- this.success = success;
- }
-
- public void unset_success() {
- this.success = null;
- }
-
- /** Returns true if field success is set (has been assigned a value) and false otherwise */
- public boolean is_set_success() {
- return this.success != null;
- }
-
- public void set_success_isSet(boolean value) {
- if (!value) {
- this.success = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SUCCESS:
- if (value == null) {
- unset_success();
- } else {
- set_success((DRPCRequest)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SUCCESS:
- return get_success();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SUCCESS:
- return is_set_success();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof fetchRequest_result)
- return this.equals((fetchRequest_result)that);
- return false;
- }
-
- public boolean equals(fetchRequest_result that) {
- if (that == null)
- return false;
-
- boolean this_present_success = true && this.is_set_success();
- boolean that_present_success = true && that.is_set_success();
- if (this_present_success || that_present_success) {
- if (!(this_present_success && that_present_success))
- return false;
- if (!this.success.equals(that.success))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_success = true && (is_set_success());
- builder.append(present_success);
- if (present_success)
- builder.append(success);
-
- return builder.toHashCode();
- }
-
- public int compareTo(fetchRequest_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- fetchRequest_result typedOther = (fetchRequest_result)other;
-
- lastComparison = Boolean.valueOf(is_set_success()).compareTo(typedOther.is_set_success());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_success()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.success, typedOther.success);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 0: // SUCCESS
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.success = new DRPCRequest();
- this.success.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- if (this.is_set_success()) {
- oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
- this.success.write(oprot);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("fetchRequest_result(");
- boolean first = true;
-
- sb.append("success:");
- if (this.success == null) {
- sb.append("null");
- } else {
- sb.append(this.success);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class failRequest_args implements org.apache.thrift7.TBase<failRequest_args, failRequest_args._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("failRequest_args");
-
- private static final org.apache.thrift7.protocol.TField ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("id", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String id; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ID((short)1, "id");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ID
- return ID;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ID, new org.apache.thrift7.meta_data.FieldMetaData("id", org.apache.thrift7.TFieldRequirementType.DEFAULT,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(failRequest_args.class, metaDataMap);
- }
-
- public failRequest_args() {
- }
-
- public failRequest_args(
- String id)
- {
- this();
- this.id = id;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public failRequest_args(failRequest_args other) {
- if (other.is_set_id()) {
- this.id = other.id;
- }
- }
-
- public failRequest_args deepCopy() {
- return new failRequest_args(this);
- }
-
- @Override
- public void clear() {
- this.id = null;
- }
-
- public String get_id() {
- return this.id;
- }
-
- public void set_id(String id) {
- this.id = id;
- }
-
- public void unset_id() {
- this.id = null;
- }
-
- /** Returns true if field id is set (has been assigned a value) and false otherwise */
- public boolean is_set_id() {
- return this.id != null;
- }
-
- public void set_id_isSet(boolean value) {
- if (!value) {
- this.id = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ID:
- if (value == null) {
- unset_id();
- } else {
- set_id((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ID:
- return get_id();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ID:
- return is_set_id();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof failRequest_args)
- return this.equals((failRequest_args)that);
- return false;
- }
-
- public boolean equals(failRequest_args that) {
- if (that == null)
- return false;
-
- boolean this_present_id = true && this.is_set_id();
- boolean that_present_id = true && that.is_set_id();
- if (this_present_id || that_present_id) {
- if (!(this_present_id && that_present_id))
- return false;
- if (!this.id.equals(that.id))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_id = true && (is_set_id());
- builder.append(present_id);
- if (present_id)
- builder.append(id);
-
- return builder.toHashCode();
- }
-
- public int compareTo(failRequest_args other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- failRequest_args typedOther = (failRequest_args)other;
-
- lastComparison = Boolean.valueOf(is_set_id()).compareTo(typedOther.is_set_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.id, typedOther.id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.id != null) {
- oprot.writeFieldBegin(ID_FIELD_DESC);
- oprot.writeString(this.id);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("failRequest_args(");
- boolean first = true;
-
- sb.append("id:");
- if (this.id == null) {
- sb.append("null");
- } else {
- sb.append(this.id);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
- public static class failRequest_result implements org.apache.thrift7.TBase<failRequest_result, failRequest_result._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("failRequest_result");
-
-
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
-;
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(failRequest_result.class, metaDataMap);
- }
-
- public failRequest_result() {
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public failRequest_result(failRequest_result other) {
- }
-
- public failRequest_result deepCopy() {
- return new failRequest_result(this);
- }
-
- @Override
- public void clear() {
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof failRequest_result)
- return this.equals((failRequest_result)that);
- return false;
- }
-
- public boolean equals(failRequest_result that) {
- if (that == null)
- return false;
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- return builder.toHashCode();
- }
-
- public int compareTo(failRequest_result other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- failRequest_result typedOther = (failRequest_result)other;
-
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- oprot.writeStructBegin(STRUCT_DESC);
-
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("failRequest_result(");
- boolean first = true;
-
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- }
-
-}
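[Editor's note] For context, the generated wrappers deleted above (fetchRequest_args, fetchRequest_result, failRequest_args, failRequest_result) back the DistributedRPCInvocations Thrift service that a DRPC server exposes to topologies: fetchRequest(functionName) hands back a pending DRPCRequest, result(id, result) returns an answer, and failRequest(id) reports that a request could not be served. Below is a minimal, hedged sketch of how a topology-side client might drive those calls; the endpoint, the TFramedTransport/TBinaryProtocol wiring, the empty-request_id convention, and the doWork helper are assumptions for illustration, not part of the deleted code.

import org.apache.thrift7.protocol.TBinaryProtocol;
import org.apache.thrift7.transport.TFramedTransport;
import org.apache.thrift7.transport.TSocket;

import backtype.storm.generated.DRPCRequest;
import backtype.storm.generated.DistributedRPCInvocations;

public class DrpcInvocationsSketch {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint; a real deployment would read drpc.servers and
        // drpc.invocations.port from storm.yaml instead of hard-coding them.
        TFramedTransport transport = new TFramedTransport(new TSocket("localhost", 3773));
        TBinaryProtocol protocol = new TBinaryProtocol(transport);
        DistributedRPCInvocations.Client client = new DistributedRPCInvocations.Client(protocol);
        transport.open();
        try {
            // fetchRequest_args carries only the function name; fetchRequest_result
            // wraps the DRPCRequest returned by the server.
            DRPCRequest request = client.fetchRequest("reach");
            // Assumption: an empty request_id is treated as "nothing pending".
            if (request.get_request_id() != null && !request.get_request_id().isEmpty()) {
                try {
                    String result = doWork(request.get_func_args());
                    client.result(request.get_request_id(), result);
                } catch (Exception e) {
                    // failRequest_args carries only the request id; failRequest_result is empty.
                    client.failRequest(request.get_request_id());
                }
            }
        } finally {
            transport.close();
        }
    }

    // Hypothetical placeholder for topology-specific processing.
    private static String doWork(String funcArgs) {
        return funcArgs;
    }
}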
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ErrorInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ErrorInfo.java b/jstorm-client/src/main/java/backtype/storm/generated/ErrorInfo.java
deleted file mode 100644
index 5e3bb6e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ErrorInfo.java
+++ /dev/null
@@ -1,425 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ErrorInfo implements org.apache.thrift7.TBase<ErrorInfo, ErrorInfo._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ErrorInfo");
-
- private static final org.apache.thrift7.protocol.TField ERROR_FIELD_DESC = new org.apache.thrift7.protocol.TField("error", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField ERROR_TIME_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("error_time_secs", org.apache.thrift7.protocol.TType.I32, (short)2);
-
- private String error; // required
- private int error_time_secs; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ERROR((short)1, "error"),
- ERROR_TIME_SECS((short)2, "error_time_secs");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ERROR
- return ERROR;
- case 2: // ERROR_TIME_SECS
- return ERROR_TIME_SECS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __ERROR_TIME_SECS_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ERROR, new org.apache.thrift7.meta_data.FieldMetaData("error", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.ERROR_TIME_SECS, new org.apache.thrift7.meta_data.FieldMetaData("error_time_secs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ErrorInfo.class, metaDataMap);
- }
-
- public ErrorInfo() {
- }
-
- public ErrorInfo(
- String error,
- int error_time_secs)
- {
- this();
- this.error = error;
- this.error_time_secs = error_time_secs;
- set_error_time_secs_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public ErrorInfo(ErrorInfo other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_error()) {
- this.error = other.error;
- }
- this.error_time_secs = other.error_time_secs;
- }
-
- public ErrorInfo deepCopy() {
- return new ErrorInfo(this);
- }
-
- @Override
- public void clear() {
- this.error = null;
- set_error_time_secs_isSet(false);
- this.error_time_secs = 0;
- }
-
- public String get_error() {
- return this.error;
- }
-
- public void set_error(String error) {
- this.error = error;
- }
-
- public void unset_error() {
- this.error = null;
- }
-
- /** Returns true if field error is set (has been assigned a value) and false otherwise */
- public boolean is_set_error() {
- return this.error != null;
- }
-
- public void set_error_isSet(boolean value) {
- if (!value) {
- this.error = null;
- }
- }
-
- public int get_error_time_secs() {
- return this.error_time_secs;
- }
-
- public void set_error_time_secs(int error_time_secs) {
- this.error_time_secs = error_time_secs;
- set_error_time_secs_isSet(true);
- }
-
- public void unset_error_time_secs() {
- __isset_bit_vector.clear(__ERROR_TIME_SECS_ISSET_ID);
- }
-
- /** Returns true if field error_time_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_error_time_secs() {
- return __isset_bit_vector.get(__ERROR_TIME_SECS_ISSET_ID);
- }
-
- public void set_error_time_secs_isSet(boolean value) {
- __isset_bit_vector.set(__ERROR_TIME_SECS_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ERROR:
- if (value == null) {
- unset_error();
- } else {
- set_error((String)value);
- }
- break;
-
- case ERROR_TIME_SECS:
- if (value == null) {
- unset_error_time_secs();
- } else {
- set_error_time_secs((Integer)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ERROR:
- return get_error();
-
- case ERROR_TIME_SECS:
- return Integer.valueOf(get_error_time_secs());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ERROR:
- return is_set_error();
- case ERROR_TIME_SECS:
- return is_set_error_time_secs();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof ErrorInfo)
- return this.equals((ErrorInfo)that);
- return false;
- }
-
- public boolean equals(ErrorInfo that) {
- if (that == null)
- return false;
-
- boolean this_present_error = true && this.is_set_error();
- boolean that_present_error = true && that.is_set_error();
- if (this_present_error || that_present_error) {
- if (!(this_present_error && that_present_error))
- return false;
- if (!this.error.equals(that.error))
- return false;
- }
-
- boolean this_present_error_time_secs = true;
- boolean that_present_error_time_secs = true;
- if (this_present_error_time_secs || that_present_error_time_secs) {
- if (!(this_present_error_time_secs && that_present_error_time_secs))
- return false;
- if (this.error_time_secs != that.error_time_secs)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_error = true && (is_set_error());
- builder.append(present_error);
- if (present_error)
- builder.append(error);
-
- boolean present_error_time_secs = true;
- builder.append(present_error_time_secs);
- if (present_error_time_secs)
- builder.append(error_time_secs);
-
- return builder.toHashCode();
- }
-
- public int compareTo(ErrorInfo other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- ErrorInfo typedOther = (ErrorInfo)other;
-
- lastComparison = Boolean.valueOf(is_set_error()).compareTo(typedOther.is_set_error());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_error()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.error, typedOther.error);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_error_time_secs()).compareTo(typedOther.is_set_error_time_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_error_time_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.error_time_secs, typedOther.error_time_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ERROR
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.error = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // ERROR_TIME_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.error_time_secs = iprot.readI32();
- set_error_time_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.error != null) {
- oprot.writeFieldBegin(ERROR_FIELD_DESC);
- oprot.writeString(this.error);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(ERROR_TIME_SECS_FIELD_DESC);
- oprot.writeI32(this.error_time_secs);
- oprot.writeFieldEnd();
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("ErrorInfo(");
- boolean first = true;
-
- sb.append("error:");
- if (this.error == null) {
- sb.append("null");
- } else {
- sb.append(this.error);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("error_time_secs:");
- sb.append(this.error_time_secs);
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_error()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'error' is unset! Struct:" + toString());
- }
-
- if (!is_set_error_time_secs()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'error_time_secs' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
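[Editor's note] The ErrorInfo struct removed above is the thrift7-generated error record the old jstorm-client used when reporting component errors. Both fields are declared REQUIRED in the field metadata, so validate() throws if either error or error_time_secs was never assigned. A small hedged sketch of that contract follows; the error message and timestamp are assumed values for illustration only.

import backtype.storm.generated.ErrorInfo;

public class ErrorInfoSketch {
    public static void main(String[] args) throws Exception {
        // The two-argument constructor sets both required fields (and marks
        // error_time_secs as set), so validate() passes.
        ErrorInfo ok = new ErrorInfo("bolt threw NullPointerException",
                                     (int) (System.currentTimeMillis() / 1000));
        ok.validate();

        // A default-constructed instance has neither field set; per the code
        // above, validate() throws "Required field 'error' is unset! ...".
        ErrorInfo bad = new ErrorInfo();
        try {
            bad.validate();
        } catch (org.apache.thrift7.TException expected) {
            System.out.println(expected.getMessage());
        }
    }
}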
[51/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
Release 2.0.4-SNAPSHOT
Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/e935da91
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/e935da91
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/e935da91
Branch: refs/heads/jstorm-import
Commit: e935da91a897797dad56e24c4ffa57860ac91878
Parents: ae1f5dd
Author: zhongyan.feng <zh...@alipay.com>
Authored: Wed Aug 5 20:11:20 2015 +0800
Committer: zhongyan.feng <zh...@alipay.com>
Committed: Wed Aug 5 20:11:20 2015 +0800
----------------------------------------------------------------------
LICENSE | 0
README.md | Bin
bin/check_jstorm_Supervisor.sh | 13 +
bin/jstorm.py | 459 +
bin/start.sh | 78 +
bin/stop.sh | 15 +
conf/cgconfig.conf | 18 +
conf/client_log4j.properties | 19 +
conf/client_logback.xml | 18 +
conf/jstorm.log4j.properties | 50 +
conf/jstorm.logback.xml | 84 +
conf/storm.yaml | 83 +
dev-tools/add_apache_license.sh | 1 +
dev-tools/java_license_header.txt | 17 +
dev-tools/py_license_header.txt | 18 +
dev-tools/storm-eclipse-java-formatter.xml | 291 +
docs/log.test.xlsx | Bin 0 -> 10428 bytes
example/sequence-split-merge/.classpath | 31 +
example/sequence-split-merge/.gitignore | 2 +
example/sequence-split-merge/.project | 23 +
.../.settings/org.eclipse.core.resources.prefs | 5 +
.../.settings/org.eclipse.jdt.core.prefs | 5 +
.../.settings/org.eclipse.m2e.core.prefs | 4 +
example/sequence-split-merge/conf/conf.prop | 0
example/sequence-split-merge/conf/conf.yaml | 0
example/sequence-split-merge/conf/topology.yaml | 0
example/sequence-split-merge/drpc.sh | 3 +
example/sequence-split-merge/pom.xml | 44 +-
.../alipay/dw/jstorm/example/IntervalCheck.java | 17 +
.../alipay/dw/jstorm/example/TpsCounter.java | 17 +
.../example/batch/SimpleBatchTopology.java | 78 +-
.../dw/jstorm/example/batch/SimpleBolt.java | 19 +-
.../dw/jstorm/example/batch/SimpleSpout.java | 17 +
.../dw/jstorm/example/drpc/ReachTopology.java | 41 +-
.../jstorm/example/drpc/TestReachTopology.java | 26 +-
.../example/sequence/SequenceTopology.java | 87 +-
.../example/sequence/SequenceTopologyDef.java | 17 +
.../example/sequence/SequenceTopologyTool.java | 17 +
.../sequence/SequenceTopologyUserDefine.java | 17 +
.../dw/jstorm/example/sequence/bean/Pair.java | 17 +
.../jstorm/example/sequence/bean/PairMaker.java | 17 +
.../example/sequence/bean/TradeCustomer.java | 17 +
.../example/sequence/bolt/MergeRecord.java | 17 +
.../jstorm/example/sequence/bolt/PairCount.java | 17 +
.../example/sequence/bolt/SplitRecord.java | 17 +
.../example/sequence/bolt/TotalCount.java | 72 +-
.../example/sequence/spout/SequenceSpout.java | 21 +-
.../transcation/TransactionalGlobalCount.java | 358 +-
example/sequence-split-merge/start.sh | 4 +-
history.md | 868 +-
history_cn.md | 77 +
jstorm-client-extension/pom.xml | 85 -
.../java/com/alibaba/jstorm/batch/BatchId.java | 66 -
.../jstorm/batch/BatchTopologyBuilder.java | 69 -
.../com/alibaba/jstorm/batch/IBatchSpout.java | 37 -
.../com/alibaba/jstorm/batch/ICommitter.java | 36 -
.../com/alibaba/jstorm/batch/IPostCommit.java | 14 -
.../alibaba/jstorm/batch/IPrepareCommit.java | 20 -
.../jstorm/batch/impl/BatchSpoutMsgId.java | 49 -
.../jstorm/batch/impl/BatchSpoutTrigger.java | 312 -
.../jstorm/batch/impl/CoordinatedBolt.java | 281 -
.../alibaba/jstorm/batch/util/BatchCommon.java | 65 -
.../com/alibaba/jstorm/batch/util/BatchDef.java | 26 -
.../alibaba/jstorm/batch/util/BatchStatus.java | 33 -
.../jstorm/callback/AsyncLoopDefaultKill.java | 25 -
.../jstorm/callback/AsyncLoopRunnable.java | 82 -
.../jstorm/callback/AsyncLoopThread.java | 122 -
.../alibaba/jstorm/callback/BaseCallback.java | 13 -
.../com/alibaba/jstorm/callback/Callback.java | 13 -
.../jstorm/callback/ClusterStateCallback.java | 7 -
.../jstorm/callback/DefaultWatcherCallBack.java | 26 -
.../jstorm/callback/RunnableCallback.java | 47 -
.../jstorm/callback/WatcherCallBack.java | 8 -
.../alibaba/jstorm/client/ConfigExtension.java | 642 -
.../alibaba/jstorm/client/WorkerAssignment.java | 264 -
.../jstorm/client/metric/MetricCallback.java | 7 -
.../jstorm/client/metric/MetricClient.java | 66 -
.../jstorm/client/spout/IAckValueSpout.java | 15 -
.../jstorm/client/spout/IFailValueSpout.java | 15 -
.../alibaba/jstorm/cluster/ClusterState.java | 38 -
.../jstorm/cluster/DistributedClusterState.java | 175 -
.../jstorm/common/stats/StatBuckets.java | 124 -
.../jstorm/common/stats/StaticsType.java | 5 -
.../alibaba/jstorm/metric/JStormHistogram.java | 39 -
.../com/alibaba/jstorm/metric/JStormTimer.java | 61 -
.../com/alibaba/jstorm/metric/MetricDef.java | 39 -
.../com/alibaba/jstorm/metric/MetricInfo.java | 27 -
.../com/alibaba/jstorm/metric/MetricJstack.java | 123 -
.../java/com/alibaba/jstorm/metric/Metrics.java | 330 -
.../alibaba/jstorm/metric/UserDefMetric.java | 106 -
.../jstorm/metric/UserDefMetricData.java | 138 -
.../jstorm/metric/metrdata/CounterData.java | 23 -
.../jstorm/metric/metrdata/GaugeData.java | 23 -
.../jstorm/metric/metrdata/HistogramData.java | 112 -
.../jstorm/metric/metrdata/MeterData.java | 58 -
.../jstorm/metric/metrdata/TimerData.java | 149 -
.../alibaba/jstorm/utils/DisruptorQueue.java | 151 -
.../com/alibaba/jstorm/utils/EventSampler.java | 100 -
.../alibaba/jstorm/utils/ExpiredCallback.java | 5 -
.../com/alibaba/jstorm/utils/FileAttribute.java | 118 -
.../alibaba/jstorm/utils/HttpserverUtils.java | 30 -
.../com/alibaba/jstorm/utils/IntervalCheck.java | 58 -
.../com/alibaba/jstorm/utils/JStormUtils.java | 1076 -
.../com/alibaba/jstorm/utils/NetWorkUtils.java | 120 -
.../alibaba/jstorm/utils/OlderFileFilter.java | 29 -
.../java/com/alibaba/jstorm/utils/Pair.java | 28 -
.../com/alibaba/jstorm/utils/PathUtils.java | 130 -
.../com/alibaba/jstorm/utils/RandomRange.java | 57 -
.../com/alibaba/jstorm/utils/RotatingMap.java | 133 -
.../com/alibaba/jstorm/utils/RunCounter.java | 85 -
.../com/alibaba/jstorm/utils/SmartThread.java | 18 -
.../com/alibaba/jstorm/utils/TimeCacheMap.java | 147 -
.../alibaba/jstorm/utils/TimeCacheQueue.java | 168 -
.../com/alibaba/jstorm/utils/TimeFormat.java | 199 -
.../com/alibaba/jstorm/utils/TimeOutMap.java | 18 -
.../com/alibaba/jstorm/utils/TimeUtils.java | 34 -
.../java/com/alibaba/jstorm/zk/Factory.java | 20 -
.../java/com/alibaba/jstorm/zk/ZkConstant.java | 19 -
.../com/alibaba/jstorm/zk/ZkCreateModes.java | 21 -
.../com/alibaba/jstorm/zk/ZkEventTypes.java | 27 -
.../com/alibaba/jstorm/zk/ZkKeeperStates.java | 24 -
.../main/java/com/alibaba/jstorm/zk/ZkTool.java | 201 -
.../java/com/alibaba/jstorm/zk/Zookeeper.java | 217 -
jstorm-client/pom.xml | 194 -
.../src/main/java/backtype/storm/Config.java | 1382 -
.../java/backtype/storm/ConfigValidation.java | 100 -
.../src/main/java/backtype/storm/Constants.java | 20 -
.../backtype/storm/GenericOptionsParser.java | 296 -
.../main/java/backtype/storm/ILocalCluster.java | 46 -
.../main/java/backtype/storm/ILocalDRPC.java | 10 -
.../java/backtype/storm/StormSubmitter.java | 375 -
.../src/main/java/backtype/storm/Tool.java | 47 -
.../main/java/backtype/storm/ToolRunner.java | 53 -
.../backtype/storm/clojure/ClojureBolt.java | 108 -
.../backtype/storm/clojure/ClojureSpout.java | 142 -
.../backtype/storm/clojure/RichShellBolt.java | 36 -
.../backtype/storm/clojure/RichShellSpout.java | 36 -
.../java/backtype/storm/command/activate.java | 49 -
.../backtype/storm/command/config_value.java | 33 -
.../java/backtype/storm/command/deactivate.java | 49 -
.../backtype/storm/command/kill_topology.java | 61 -
.../main/java/backtype/storm/command/list.java | 59 -
.../backtype/storm/command/metrics_monitor.java | 56 -
.../java/backtype/storm/command/rebalance.java | 60 -
.../java/backtype/storm/command/restart.java | 120 -
.../storm/coordination/BatchBoltExecutor.java | 92 -
.../coordination/BatchOutputCollector.java | 31 -
.../coordination/BatchOutputCollectorImpl.java | 36 -
.../coordination/BatchSubtopologyBuilder.java | 469 -
.../storm/coordination/CoordinatedBolt.java | 379 -
.../backtype/storm/coordination/IBatchBolt.java | 16 -
.../backtype/storm/daemon/Shutdownable.java | 5 -
.../storm/drpc/DRPCInvocationsClient.java | 78 -
.../java/backtype/storm/drpc/DRPCSpout.java | 162 -
.../java/backtype/storm/drpc/JoinResult.java | 59 -
.../java/backtype/storm/drpc/KeyedFairBolt.java | 75 -
.../storm/drpc/LinearDRPCInputDeclarer.java | 42 -
.../storm/drpc/LinearDRPCTopologyBuilder.java | 378 -
.../backtype/storm/drpc/PrepareRequest.java | 41 -
.../java/backtype/storm/drpc/ReturnResults.java | 83 -
.../storm/generated/AlreadyAliveException.java | 328 -
.../storm/generated/AuthorizationException.java | 328 -
.../java/backtype/storm/generated/Bolt.java | 427 -
.../backtype/storm/generated/BoltStats.java | 1111 -
.../storm/generated/ClusterSummary.java | 693 -
.../storm/generated/ComponentCommon.java | 707 -
.../storm/generated/ComponentObject.java | 385 -
.../storm/generated/DRPCExecutionException.java | 328 -
.../backtype/storm/generated/DRPCRequest.java | 425 -
.../storm/generated/DistributedRPC.java | 964 -
.../generated/DistributedRPCInvocations.java | 2015 --
.../backtype/storm/generated/ErrorInfo.java | 425 -
.../backtype/storm/generated/ExecutorInfo.java | 420 -
.../storm/generated/ExecutorSpecificStats.java | 318 -
.../storm/generated/GlobalStreamId.java | 425 -
.../java/backtype/storm/generated/Grouping.java | 714 -
.../generated/InvalidTopologyException.java | 328 -
.../backtype/storm/generated/JavaObject.java | 463 -
.../backtype/storm/generated/JavaObjectArg.java | 532 -
.../backtype/storm/generated/KillOptions.java | 320 -
.../storm/generated/MonitorOptions.java | 320 -
.../java/backtype/storm/generated/Nimbus.java | 18351 -----------
.../storm/generated/NotAliveException.java | 328 -
.../backtype/storm/generated/NullStruct.java | 225 -
.../storm/generated/RebalanceOptions.java | 409 -
.../storm/generated/ShellComponent.java | 417 -
.../backtype/storm/generated/SpoutSpec.java | 427 -
.../backtype/storm/generated/SpoutStats.java | 756 -
.../storm/generated/StateSpoutSpec.java | 427 -
.../backtype/storm/generated/StormTopology.java | 660 -
.../backtype/storm/generated/StreamInfo.java | 462 -
.../backtype/storm/generated/SubmitOptions.java | 340 -
.../storm/generated/SupervisorSummary.java | 706 -
.../storm/generated/SupervisorWorkers.java | 464 -
.../storm/generated/TaskMetricData.java | 1135 -
.../backtype/storm/generated/TaskStats.java | 1285 -
.../backtype/storm/generated/TaskSummary.java | 1122 -
.../storm/generated/ThriftResourceType.java | 47 -
.../generated/TopologyAssignException.java | 328 -
.../backtype/storm/generated/TopologyInfo.java | 1022 -
.../storm/generated/TopologyInitialStatus.java | 44 -
.../storm/generated/TopologyMetricInfo.java | 594 -
.../storm/generated/TopologySummary.java | 900 -
.../backtype/storm/generated/UserDefMetric.java | 522 -
.../storm/generated/WorkerMetricData.java | 1135 -
.../backtype/storm/generated/WorkerSummary.java | 560 -
.../storm/grouping/CustomStreamGrouping.java | 29 -
.../java/backtype/storm/hooks/BaseTaskHook.java | 44 -
.../java/backtype/storm/hooks/ITaskHook.java | 28 -
.../backtype/storm/hooks/info/BoltAckInfo.java | 15 -
.../storm/hooks/info/BoltExecuteInfo.java | 16 -
.../backtype/storm/hooks/info/BoltFailInfo.java | 15 -
.../backtype/storm/hooks/info/EmitInfo.java | 19 -
.../backtype/storm/hooks/info/SpoutAckInfo.java | 14 -
.../storm/hooks/info/SpoutFailInfo.java | 13 -
.../backtype/storm/messaging/IConnection.java | 36 -
.../java/backtype/storm/messaging/IContext.java | 58 -
.../backtype/storm/messaging/TaskMessage.java | 51 -
.../storm/messaging/TransportFactory.java | 54 -
.../storm/metric/LoggingMetricsConsumer.java | 59 -
.../storm/metric/MetricsConsumerBolt.java | 56 -
.../java/backtype/storm/metric/SystemBolt.java | 157 -
.../storm/metric/api/AssignableMetric.java | 17 -
.../storm/metric/api/CombinedMetric.java | 21 -
.../backtype/storm/metric/api/CountMetric.java | 24 -
.../backtype/storm/metric/api/ICombiner.java | 7 -
.../java/backtype/storm/metric/api/IMetric.java | 5 -
.../storm/metric/api/IMetricsConsumer.java | 56 -
.../backtype/storm/metric/api/IReducer.java | 9 -
.../storm/metric/api/IStatefulObject.java | 5 -
.../backtype/storm/metric/api/MeanReducer.java | 39 -
.../storm/metric/api/MultiCountMetric.java | 28 -
.../storm/metric/api/MultiReducedMetric.java | 33 -
.../storm/metric/api/ReducedMetric.java | 21 -
.../backtype/storm/metric/api/StateMetric.java | 14 -
.../metric/api/rpc/AssignableShellMetric.java | 30 -
.../metric/api/rpc/CombinedShellMetric.java | 31 -
.../storm/metric/api/rpc/CountShellMetric.java | 38 -
.../storm/metric/api/rpc/IShellMetric.java | 31 -
.../metric/api/rpc/ReducedShellMetric.java | 32 -
.../java/backtype/storm/multilang/BoltMsg.java | 80 -
.../backtype/storm/multilang/ISerializer.java | 82 -
.../storm/multilang/JsonSerializer.java | 202 -
.../storm/multilang/NoOutputException.java | 40 -
.../java/backtype/storm/multilang/ShellMsg.java | 185 -
.../java/backtype/storm/multilang/SpoutMsg.java | 51 -
.../storm/nimbus/DefaultTopologyValidator.java | 16 -
.../storm/nimbus/ITopologyValidator.java | 12 -
.../backtype/storm/planner/CompoundSpout.java | 7 -
.../backtype/storm/planner/CompoundTask.java | 7 -
.../java/backtype/storm/planner/TaskBundle.java | 15 -
.../java/backtype/storm/scheduler/Cluster.java | 448 -
.../storm/scheduler/ExecutorDetails.java | 38 -
.../java/backtype/storm/scheduler/INimbus.java | 28 -
.../backtype/storm/scheduler/IScheduler.java | 27 -
.../backtype/storm/scheduler/ISupervisor.java | 33 -
.../storm/scheduler/SchedulerAssignment.java | 45 -
.../scheduler/SchedulerAssignmentImpl.java | 98 -
.../storm/scheduler/SupervisorDetails.java | 73 -
.../backtype/storm/scheduler/Topologies.java | 42 -
.../storm/scheduler/TopologyDetails.java | 76 -
.../backtype/storm/scheduler/WorkerSlot.java | 87 -
.../backtype/storm/security/auth/AuthUtils.java | 94 -
.../storm/security/auth/IAuthorizer.java | 38 -
.../storm/security/auth/ITransportPlugin.java | 49 -
.../storm/security/auth/ReqContext.java | 91 -
.../security/auth/SaslTransportPlugin.java | 150 -
.../security/auth/SimpleTransportPlugin.java | 119 -
.../storm/security/auth/ThriftClient.java | 138 -
.../storm/security/auth/ThriftServer.java | 70 -
.../auth/authorizer/DenyAuthorizer.java | 52 -
.../auth/authorizer/NoopAuthorizer.java | 53 -
.../auth/digest/ClientCallbackHandler.java | 101 -
.../auth/digest/DigestSaslTransportPlugin.java | 52 -
.../auth/digest/ServerCallbackHandler.java | 109 -
.../serialization/BlowfishTupleSerializer.java | 80 -
.../storm/serialization/DefaultKryoFactory.java | 47 -
.../storm/serialization/IKryoDecorator.java | 7 -
.../storm/serialization/IKryoFactory.java | 25 -
.../storm/serialization/ITupleDeserializer.java | 9 -
.../storm/serialization/ITupleSerializer.java | 15 -
.../serialization/KryoTupleDeserializer.java | 90 -
.../serialization/KryoTupleSerializer.java | 67 -
.../serialization/KryoValuesDeserializer.java | 39 -
.../serialization/KryoValuesSerializer.java | 45 -
.../serialization/SerializableSerializer.java | 46 -
.../serialization/SerializationFactory.java | 242 -
.../types/ArrayListSerializer.java | 14 -
.../serialization/types/HashMapSerializer.java | 14 -
.../serialization/types/HashSetSerializer.java | 14 -
.../types/ListDelegateSerializer.java | 14 -
.../storm/spout/IMultiSchemableSpout.java | 7 -
.../backtype/storm/spout/ISchemableSpout.java | 7 -
.../main/java/backtype/storm/spout/ISpout.java | 116 -
.../storm/spout/ISpoutOutputCollector.java | 15 -
.../storm/spout/ISpoutWaitStrategy.java | 18 -
.../java/backtype/storm/spout/MultiScheme.java | 12 -
.../storm/spout/NothingEmptyEmitStrategy.java | 14 -
.../backtype/storm/spout/RawMultiScheme.java | 20 -
.../java/backtype/storm/spout/RawScheme.java | 15 -
.../main/java/backtype/storm/spout/Scheme.java | 11 -
.../storm/spout/SchemeAsMultiScheme.java | 28 -
.../java/backtype/storm/spout/ShellSpout.java | 260 -
.../storm/spout/SleepSpoutWaitStrategy.java | 25 -
.../storm/spout/SpoutOutputCollector.java | 125 -
.../java/backtype/storm/state/IStateSpout.java | 15 -
.../storm/state/IStateSpoutOutputCollector.java | 5 -
.../backtype/storm/state/ISubscribedState.java | 9 -
.../state/ISynchronizeOutputCollector.java | 7 -
.../storm/state/StateSpoutOutputCollector.java | 11 -
.../storm/state/SynchronizeOutputCollector.java | 12 -
.../storm/task/GeneralTopologyContext.java | 206 -
.../main/java/backtype/storm/task/IBolt.java | 98 -
.../backtype/storm/task/IErrorReporter.java | 5 -
.../backtype/storm/task/IMetricsContext.java | 18 -
.../backtype/storm/task/IOutputCollector.java | 20 -
.../backtype/storm/task/OutputCollector.java | 245 -
.../java/backtype/storm/task/ShellBolt.java | 383 -
.../backtype/storm/task/TopologyContext.java | 317 -
.../storm/task/WorkerTopologyContext.java | 84 -
.../backtype/storm/testing/AckFailDelegate.java | 9 -
.../storm/testing/AckFailMapTracker.java | 35 -
.../java/backtype/storm/testing/AckTracker.java | 35 -
.../backtype/storm/testing/BatchNumberList.java | 55 -
.../storm/testing/BatchProcessWord.java | 23 -
.../backtype/storm/testing/BatchRepeatA.java | 30 -
.../backtype/storm/testing/BoltTracker.java | 24 -
.../storm/testing/CompleteTopologyParam.java | 57 -
.../storm/testing/CountingBatchBolt.java | 39 -
.../storm/testing/CountingCommitBolt.java | 42 -
.../backtype/storm/testing/FeederSpout.java | 83 -
.../java/backtype/storm/testing/FixedTuple.java | 25 -
.../backtype/storm/testing/FixedTupleSpout.java | 164 -
.../backtype/storm/testing/IdentityBolt.java | 25 -
.../storm/testing/KeyedCountingBatchBolt.java | 45 -
.../testing/KeyedCountingCommitterBolt.java | 8 -
.../storm/testing/KeyedSummingBatchBolt.java | 45 -
.../storm/testing/MemoryTransactionalSpout.java | 187 -
.../testing/MemoryTransactionalSpoutMeta.java | 21 -
.../backtype/storm/testing/MkClusterParam.java | 46 -
.../backtype/storm/testing/MkTupleParam.java | 37 -
.../backtype/storm/testing/MockedSources.java | 46 -
.../java/backtype/storm/testing/NGrouping.java | 34 -
.../storm/testing/NonRichBoltTracker.java | 34 -
.../testing/OpaqueMemoryTransactionalSpout.java | 190 -
.../storm/testing/PrepareBatchBolt.java | 33 -
.../backtype/storm/testing/SpoutTracker.java | 88 -
.../storm/testing/TestAggregatesCounter.java | 46 -
.../backtype/storm/testing/TestConfBolt.java | 44 -
.../backtype/storm/testing/TestGlobalCount.java | 43 -
.../java/backtype/storm/testing/TestJob.java | 24 -
.../storm/testing/TestKryoDecorator.java | 14 -
.../backtype/storm/testing/TestPlannerBolt.java | 28 -
.../storm/testing/TestPlannerSpout.java | 67 -
.../backtype/storm/testing/TestSerObject.java | 40 -
.../backtype/storm/testing/TestWordCounter.java | 43 -
.../backtype/storm/testing/TestWordSpout.java | 70 -
.../backtype/storm/testing/TrackedTopology.java | 17 -
.../storm/testing/TupleCaptureBolt.java | 68 -
.../topology/BaseConfigurationDeclarer.java | 41 -
.../storm/topology/BasicBoltExecutor.java | 50 -
.../storm/topology/BasicOutputCollector.java | 44 -
.../backtype/storm/topology/BoltDeclarer.java | 6 -
.../ComponentConfigurationDeclarer.java | 18 -
.../storm/topology/FailedException.java | 19 -
.../backtype/storm/topology/IBasicBolt.java | 20 -
.../storm/topology/IBasicOutputCollector.java | 11 -
.../backtype/storm/topology/IComponent.java | 30 -
.../java/backtype/storm/topology/IRichBolt.java | 12 -
.../backtype/storm/topology/IRichSpout.java | 12 -
.../storm/topology/IRichStateSpout.java | 7 -
.../backtype/storm/topology/InputDeclarer.java | 48 -
.../storm/topology/OutputFieldsDeclarer.java | 16 -
.../storm/topology/OutputFieldsGetter.java | 36 -
.../storm/topology/ReportedFailedException.java | 19 -
.../backtype/storm/topology/SpoutDeclarer.java | 6 -
.../storm/topology/TopologyBuilder.java | 424 -
.../storm/topology/base/BaseBasicBolt.java | 16 -
.../storm/topology/base/BaseBatchBolt.java | 9 -
.../storm/topology/base/BaseComponent.java | 11 -
...BaseOpaquePartitionedTransactionalSpout.java | 8 -
.../base/BasePartitionedTransactionalSpout.java | 9 -
.../storm/topology/base/BaseRichBolt.java | 9 -
.../storm/topology/base/BaseRichSpout.java | 33 -
.../topology/base/BaseTransactionalBolt.java | 8 -
.../topology/base/BaseTransactionalSpout.java | 9 -
.../storm/transactional/ICommitter.java | 9 -
.../ICommitterTransactionalSpout.java | 13 -
.../transactional/ITransactionalSpout.java | 88 -
.../storm/transactional/TransactionAttempt.java | 44 -
.../TransactionalSpoutBatchExecutor.java | 84 -
.../TransactionalSpoutCoordinator.java | 220 -
.../TransactionalTopologyBuilder.java | 566 -
.../IOpaquePartitionedTransactionalSpout.java | 46 -
.../IPartitionedTransactionalSpout.java | 60 -
...uePartitionedTransactionalSpoutExecutor.java | 153 -
.../PartitionedTransactionalSpoutExecutor.java | 136 -
.../state/RotatingTransactionalState.java | 143 -
.../transactional/state/TransactionalState.java | 132 -
.../main/java/backtype/storm/tuple/Fields.java | 82 -
.../main/java/backtype/storm/tuple/ITuple.java | 119 -
.../java/backtype/storm/tuple/MessageId.java | 86 -
.../main/java/backtype/storm/tuple/Tuple.java | 43 -
.../java/backtype/storm/tuple/TupleExt.java | 12 -
.../java/backtype/storm/tuple/TupleImpl.java | 342 -
.../java/backtype/storm/tuple/TupleImplExt.java | 31 -
.../main/java/backtype/storm/tuple/Values.java | 20 -
.../storm/utils/BufferFileInputStream.java | 37 -
.../backtype/storm/utils/CRC32OutputStream.java | 27 -
.../backtype/storm/utils/ClojureTimerTask.java | 18 -
.../java/backtype/storm/utils/Container.java | 7 -
.../java/backtype/storm/utils/DRPCClient.java | 70 -
.../backtype/storm/utils/DisruptorQueue.java | 71 -
.../storm/utils/DisruptorQueueImpl.java | 298 -
.../storm/utils/DisruptorWrapBlockingQueue.java | 192 -
.../storm/utils/IndifferentAccessMap.java | 169 -
.../backtype/storm/utils/InprocMessaging.java | 42 -
.../storm/utils/KeyedRoundRobinQueue.java | 51 -
.../java/backtype/storm/utils/ListDelegate.java | 139 -
.../java/backtype/storm/utils/LocalState.java | 78 -
.../java/backtype/storm/utils/MutableInt.java | 26 -
.../java/backtype/storm/utils/MutableLong.java | 26 -
.../backtype/storm/utils/MutableObject.java | 21 -
.../java/backtype/storm/utils/NimbusClient.java | 48 -
.../storm/utils/RegisteredGlobalState.java | 47 -
.../java/backtype/storm/utils/RotatingMap.java | 109 -
.../backtype/storm/utils/ServiceRegistry.java | 30 -
.../java/backtype/storm/utils/ShellProcess.java | 182 -
.../storm/utils/ThreadResourceManager.java | 28 -
.../storm/utils/ThriftTopologyUtils.java | 59 -
.../main/java/backtype/storm/utils/Time.java | 93 -
.../java/backtype/storm/utils/TimeCacheMap.java | 143 -
.../java/backtype/storm/utils/TupleHelpers.java | 16 -
.../main/java/backtype/storm/utils/Utils.java | 592 -
.../backtype/storm/utils/VersionedStore.java | 178 -
.../storm/utils/WindowedTimeThrottler.java | 35 -
.../backtype/storm/utils/WorkerClassLoader.java | 156 -
.../backtype/storm/utils/WritableUtils.java | 377 -
.../backtype/storm/utils/ZookeeperAuthInfo.java | 30 -
.../utils/disruptor/AbstractSequencerExt.java | 21 -
.../utils/disruptor/MultiProducerSequencer.java | 298 -
.../storm/utils/disruptor/RingBuffer.java | 1184 -
.../disruptor/SingleProducerSequencer.java | 199 -
.../src/main/java/storm/trident/JoinType.java | 13 -
.../src/main/java/storm/trident/Stream.java | 360 -
.../main/java/storm/trident/TridentState.java | 23 -
.../java/storm/trident/TridentTopology.java | 796 -
.../trident/drpc/ReturnResultsReducer.java | 96 -
.../fluent/ChainedAggregatorDeclarer.java | 166 -
.../fluent/ChainedFullAggregatorDeclarer.java | 15 -
.../ChainedPartitionAggregatorDeclarer.java | 15 -
.../trident/fluent/GlobalAggregationScheme.java | 9 -
.../storm/trident/fluent/GroupedStream.java | 157 -
.../trident/fluent/IAggregatableStream.java | 14 -
.../fluent/IChainedAggregatorDeclarer.java | 7 -
.../java/storm/trident/fluent/UniqueIdGen.java | 17 -
.../java/storm/trident/graph/GraphGrouper.java | 106 -
.../main/java/storm/trident/graph/Group.java | 89 -
.../storm/trident/operation/Aggregator.java | 9 -
.../java/storm/trident/operation/Assembly.java | 8 -
.../storm/trident/operation/BaseAggregator.java | 6 -
.../storm/trident/operation/BaseFilter.java | 6 -
.../storm/trident/operation/BaseFunction.java | 6 -
.../trident/operation/BaseMultiReducer.java | 16 -
.../storm/trident/operation/BaseOperation.java | 15 -
.../trident/operation/CombinerAggregator.java | 12 -
.../storm/trident/operation/EachOperation.java | 5 -
.../java/storm/trident/operation/Filter.java | 8 -
.../java/storm/trident/operation/Function.java | 7 -
.../trident/operation/GroupedMultiReducer.java | 14 -
.../storm/trident/operation/MultiReducer.java | 14 -
.../java/storm/trident/operation/Operation.java | 9 -
.../trident/operation/ReducerAggregator.java | 9 -
.../trident/operation/TridentCollector.java | 9 -
.../operation/TridentMultiReducerContext.java | 19 -
.../operation/TridentOperationContext.java | 48 -
.../storm/trident/operation/builtin/Count.java | 24 -
.../storm/trident/operation/builtin/Debug.java | 22 -
.../storm/trident/operation/builtin/Equals.java | 26 -
.../trident/operation/builtin/FilterNull.java | 14 -
.../storm/trident/operation/builtin/FirstN.java | 108 -
.../storm/trident/operation/builtin/MapGet.java | 21 -
.../storm/trident/operation/builtin/Negate.java | 31 -
.../trident/operation/builtin/SnapshotGet.java | 27 -
.../storm/trident/operation/builtin/Sum.java | 25 -
.../operation/builtin/TupleCollectionGet.java | 29 -
.../operation/impl/CaptureCollector.java | 25 -
.../operation/impl/ChainedAggregatorImpl.java | 96 -
.../trident/operation/impl/ChainedResult.java | 36 -
.../operation/impl/CombinerAggStateUpdater.java | 39 -
.../impl/CombinerAggregatorCombineImpl.java | 44 -
.../impl/CombinerAggregatorInitImpl.java | 32 -
.../trident/operation/impl/FilterExecutor.java | 36 -
.../operation/impl/GlobalBatchToPartition.java | 12 -
.../trident/operation/impl/GroupCollector.java | 31 -
.../operation/impl/GroupedAggregator.java | 79 -
.../impl/GroupedMultiReducerExecutor.java | 78 -
.../operation/impl/IdentityMultiReducer.java | 34 -
.../impl/IndexHashBatchToPartition.java | 12 -
.../operation/impl/JoinerMultiReducer.java | 142 -
.../operation/impl/ReducerAggStateUpdater.java | 36 -
.../operation/impl/ReducerAggregatorImpl.java | 39 -
.../storm/trident/operation/impl/Result.java | 10 -
.../operation/impl/SingleEmitAggregator.java | 78 -
.../trident/operation/impl/TrueFilter.java | 23 -
.../storm/trident/partition/GlobalGrouping.java | 28 -
.../trident/partition/IdentityGrouping.java | 44 -
.../trident/partition/IndexHashGrouping.java | 36 -
.../storm/trident/planner/BridgeReceiver.java | 21 -
.../main/java/storm/trident/planner/Node.java | 63 -
.../storm/trident/planner/NodeStateInfo.java | 14 -
.../storm/trident/planner/PartitionNode.java | 35 -
.../storm/trident/planner/ProcessorContext.java | 12 -
.../storm/trident/planner/ProcessorNode.java | 16 -
.../java/storm/trident/planner/SpoutNode.java | 22 -
.../storm/trident/planner/SubtopologyBolt.java | 201 -
.../storm/trident/planner/TridentProcessor.java | 23 -
.../storm/trident/planner/TupleReceiver.java | 10 -
.../planner/processor/AggregateProcessor.java | 67 -
.../planner/processor/AppendCollector.java | 45 -
.../planner/processor/EachProcessor.java | 63 -
.../planner/processor/FreshCollector.java | 42 -
.../processor/MultiReducerProcessor.java | 76 -
.../processor/PartitionPersistProcessor.java | 90 -
.../planner/processor/ProjectedProcessor.java | 56 -
.../planner/processor/StateQueryProcessor.java | 89 -
.../planner/processor/TridentContext.java | 59 -
.../storm/trident/spout/BatchSpoutExecutor.java | 75 -
.../main/java/storm/trident/spout/IBatchID.java | 7 -
.../java/storm/trident/spout/IBatchSpout.java | 16 -
.../trident/spout/ICommitterTridentSpout.java | 14 -
.../spout/IOpaquePartitionedTridentSpout.java | 45 -
.../trident/spout/IPartitionedTridentSpout.java | 60 -
.../storm/trident/spout/ISpoutPartition.java | 8 -
.../java/storm/trident/spout/ITridentSpout.java | 77 -
.../OpaquePartitionedTridentSpoutExecutor.java | 184 -
.../spout/PartitionedTridentSpoutExecutor.java | 154 -
.../trident/spout/RichSpoutBatchExecutor.java | 181 -
.../storm/trident/spout/RichSpoutBatchId.java | 32 -
.../spout/RichSpoutBatchIdSerializer.java | 21 -
.../trident/spout/RichSpoutBatchTriggerer.java | 161 -
.../trident/spout/TridentSpoutCoordinator.java | 78 -
.../trident/spout/TridentSpoutExecutor.java | 120 -
.../storm/trident/state/BaseQueryFunction.java | 8 -
.../storm/trident/state/BaseStateUpdater.java | 8 -
.../trident/state/CombinerValueUpdater.java | 19 -
.../storm/trident/state/ITupleCollection.java | 9 -
.../state/JSONNonTransactionalSerializer.java | 28 -
.../trident/state/JSONOpaqueSerializer.java | 36 -
.../state/JSONTransactionalSerializer.java | 34 -
.../java/storm/trident/state/OpaqueValue.java | 58 -
.../java/storm/trident/state/QueryFunction.java | 11 -
.../java/storm/trident/state/ReadOnlyState.java | 14 -
.../trident/state/ReducerValueUpdater.java | 24 -
.../java/storm/trident/state/Serializer.java | 9 -
.../main/java/storm/trident/state/State.java | 22 -
.../java/storm/trident/state/StateFactory.java | 9 -
.../java/storm/trident/state/StateSpec.java | 13 -
.../java/storm/trident/state/StateType.java | 8 -
.../java/storm/trident/state/StateUpdater.java | 16 -
.../storm/trident/state/TransactionalValue.java | 27 -
.../java/storm/trident/state/ValueUpdater.java | 6 -
.../trident/state/map/CachedBatchReadsMap.java | 63 -
.../java/storm/trident/state/map/CachedMap.java | 62 -
.../storm/trident/state/map/IBackingMap.java | 9 -
.../state/map/MapCombinerAggStateUpdater.java | 66 -
.../state/map/MapReducerAggStateUpdater.java | 75 -
.../java/storm/trident/state/map/MapState.java | 9 -
.../state/map/MicroBatchIBackingMap.java | 68 -
.../trident/state/map/NonTransactionalMap.java | 50 -
.../java/storm/trident/state/map/OpaqueMap.java | 107 -
.../trident/state/map/ReadOnlyMapState.java | 9 -
.../trident/state/map/RemovableMapState.java | 8 -
.../trident/state/map/SnapshottableMap.java | 59 -
.../trident/state/map/TransactionalMap.java | 92 -
.../state/snapshot/ReadOnlySnapshottable.java | 7 -
.../trident/state/snapshot/Snapshottable.java | 10 -
.../trident/testing/CountAsAggregator.java | 30 -
.../storm/trident/testing/FeederBatchSpout.java | 168 -
.../testing/FeederCommitterBatchSpout.java | 79 -
.../storm/trident/testing/FixedBatchSpout.java | 80 -
.../java/storm/trident/testing/IFeeder.java | 6 -
.../trident/testing/LRUMemoryMapState.java | 135 -
.../storm/trident/testing/MemoryBackingMap.java | 30 -
.../storm/trident/testing/MemoryMapState.java | 157 -
.../main/java/storm/trident/testing/Split.java | 19 -
.../storm/trident/testing/StringLength.java | 15 -
.../java/storm/trident/testing/TrueFilter.java | 13 -
.../java/storm/trident/testing/TuplifyArgs.java | 21 -
.../java/storm/trident/topology/BatchInfo.java | 16 -
.../trident/topology/ITridentBatchBolt.java | 15 -
.../topology/MasterBatchCoordinator.java | 317 -
.../trident/topology/TransactionAttempt.java | 49 -
.../trident/topology/TridentBoltExecutor.java | 430 -
.../topology/TridentTopologyBuilder.java | 751 -
.../state/RotatingTransactionalState.java | 130 -
.../topology/state/TransactionalState.java | 119 -
.../java/storm/trident/tuple/ComboList.java | 75 -
.../main/java/storm/trident/tuple/ConsList.java | 27 -
.../java/storm/trident/tuple/TridentTuple.java | 17 -
.../storm/trident/tuple/TridentTupleView.java | 342 -
.../java/storm/trident/tuple/ValuePointer.java | 43 -
.../storm/trident/util/ErrorEdgeFactory.java | 11 -
.../java/storm/trident/util/IndexedEdge.java | 55 -
.../main/java/storm/trident/util/LRUMap.java | 18 -
.../java/storm/trident/util/TridentUtils.java | 125 -
jstorm-client/src/main/py/__init__.py | 0
.../src/main/py/storm/DistributedRPC-remote | 85 -
.../src/main/py/storm/DistributedRPC.py | 256 -
.../py/storm/DistributedRPCInvocations-remote | 99 -
.../main/py/storm/DistributedRPCInvocations.py | 549 -
jstorm-client/src/main/py/storm/Nimbus-remote | 232 -
jstorm-client/src/main/py/storm/Nimbus.py | 4149 ---
jstorm-client/src/main/py/storm/__init__.py | 1 -
jstorm-client/src/main/py/storm/constants.py | 9 -
jstorm-client/src/main/py/storm/ttypes.py | 4254 ---
.../jstorm/util/queue/DisruptorTest.java | 545 -
jstorm-core/.classpath | 37 +
jstorm-core/.gitignore | 1 +
jstorm-core/.project | 36 +
.../.settings/org.eclipse.core.resources.prefs | 6 +
.../.settings/org.eclipse.jdt.core.prefs | 8 +
.../.settings/org.eclipse.m2e.core.prefs | 4 +
.../.settings/org.eclipse.wst.common.component | 6 +
...rg.eclipse.wst.common.project.facet.core.xml | 5 +
.../.settings/org.eclipse.wst.validation.prefs | 2 +
jstorm-core/pom.xml | 254 +
.../src/main/java/backtype/storm/Config.java | 1579 +
.../java/backtype/storm/ConfigValidation.java | 351 +
.../src/main/java/backtype/storm/Constants.java | 37 +
.../backtype/storm/GenericOptionsParser.java | 283 +
.../backtype/storm/ICredentialsListener.java | 32 +
.../main/java/backtype/storm/ILocalCluster.java | 49 +
.../main/java/backtype/storm/ILocalDRPC.java | 27 +
.../main/java/backtype/storm/LocalCluster.java | 275 +
.../java/backtype/storm/LocalClusterMap.java | 126 +
.../src/main/java/backtype/storm/LocalDRPC.java | 100 +
.../main/java/backtype/storm/LocalUtils.java | 127 +
.../java/backtype/storm/StormSubmitter.java | 366 +
.../src/main/java/backtype/storm/Tool.java | 71 +
.../main/java/backtype/storm/ToolRunner.java | 69 +
.../backtype/storm/clojure/ClojureBolt.java | 119 +
.../backtype/storm/clojure/ClojureSpout.java | 153 +
.../backtype/storm/clojure/RichShellBolt.java | 51 +
.../backtype/storm/clojure/RichShellSpout.java | 51 +
.../java/backtype/storm/command/activate.java | 65 +
.../backtype/storm/command/config_value.java | 50 +
.../java/backtype/storm/command/deactivate.java | 65 +
.../backtype/storm/command/kill_topology.java | 77 +
.../main/java/backtype/storm/command/list.java | 70 +
.../backtype/storm/command/metrics_monitor.java | 72 +
.../java/backtype/storm/command/rebalance.java | 127 +
.../java/backtype/storm/command/restart.java | 80 +
.../backtype/storm/command/update_config.java | 69 +
.../storm/coordination/BatchBoltExecutor.java | 108 +
.../coordination/BatchOutputCollector.java | 46 +
.../coordination/BatchOutputCollectorImpl.java | 53 +
.../coordination/BatchSubtopologyBuilder.java | 479 +
.../storm/coordination/CoordinatedBolt.java | 382 +
.../backtype/storm/coordination/IBatchBolt.java | 30 +
.../backtype/storm/daemon/Shutdownable.java | 22 +
.../storm/drpc/DRPCInvocationsClient.java | 114 +
.../java/backtype/storm/drpc/DRPCSpout.java | 263 +
.../java/backtype/storm/drpc/JoinResult.java | 75 +
.../java/backtype/storm/drpc/KeyedFairBolt.java | 93 +
.../storm/drpc/LinearDRPCInputDeclarer.java | 52 +
.../storm/drpc/LinearDRPCTopologyBuilder.java | 394 +
.../backtype/storm/drpc/PrepareRequest.java | 59 +
.../java/backtype/storm/drpc/ReturnResults.java | 125 +
.../storm/generated/AlreadyAliveException.java | 389 +
.../storm/generated/AuthorizationException.java | 389 +
.../java/backtype/storm/generated/Bolt.java | 497 +
.../storm/generated/ClusterSummary.java | 704 +
.../storm/generated/ComponentCommon.java | 835 +
.../storm/generated/ComponentObject.java | 445 +
.../storm/generated/ComponentSummary.java | 903 +
.../backtype/storm/generated/Credentials.java | 441 +
.../storm/generated/DRPCExecutionException.java | 389 +
.../backtype/storm/generated/DRPCRequest.java | 490 +
.../storm/generated/DistributedRPC.java | 1195 +
.../generated/DistributedRPCInvocations.java | 2545 ++
.../backtype/storm/generated/ErrorInfo.java | 488 +
.../storm/generated/GlobalStreamId.java | 490 +
.../java/backtype/storm/generated/Grouping.java | 844 +
.../generated/InvalidTopologyException.java | 389 +
.../backtype/storm/generated/JavaObject.java | 544 +
.../backtype/storm/generated/JavaObjectArg.java | 614 +
.../backtype/storm/generated/KillOptions.java | 390 +
.../storm/generated/LocalStateData.java | 454 +
.../backtype/storm/generated/MetricInfo.java | 902 +
.../backtype/storm/generated/MetricWindow.java | 441 +
.../storm/generated/MonitorOptions.java | 390 +
.../backtype/storm/generated/NettyMetric.java | 553 +
.../java/backtype/storm/generated/Nimbus.java | 28951 +++++++++++++++++
.../backtype/storm/generated/NimbusStat.java | 490 +
.../backtype/storm/generated/NimbusSummary.java | 1034 +
.../storm/generated/NotAliveException.java | 389 +
.../backtype/storm/generated/NullStruct.java | 283 +
.../storm/generated/RebalanceOptions.java | 595 +
.../storm/generated/ShellComponent.java | 499 +
.../backtype/storm/generated/SpoutSpec.java | 497 +
.../storm/generated/StateSpoutSpec.java | 497 +
.../backtype/storm/generated/StormTopology.java | 786 +
.../backtype/storm/generated/StreamInfo.java | 537 +
.../backtype/storm/generated/SubmitOptions.java | 401 +
.../storm/generated/SupervisorSummary.java | 779 +
.../storm/generated/SupervisorWorkers.java | 715 +
.../backtype/storm/generated/TaskComponent.java | 488 +
.../backtype/storm/generated/TaskSummary.java | 943 +
.../storm/generated/ThriftSerializedObject.java | 499 +
.../generated/TopologyAssignException.java | 389 +
.../backtype/storm/generated/TopologyInfo.java | 810 +
.../storm/generated/TopologyInitialStatus.java | 45 +
.../storm/generated/TopologyMetric.java | 892 +
.../storm/generated/TopologySummary.java | 990 +
.../backtype/storm/generated/WorkerSummary.java | 738 +
.../storm/generated/WorkerUploadMetrics.java | 967 +
.../storm/grouping/CustomStreamGrouping.java | 43 +
.../storm/grouping/PartialKeyGrouping.java | 82 +
.../java/backtype/storm/hooks/BaseTaskHook.java | 61 +
.../java/backtype/storm/hooks/ITaskHook.java | 38 +
.../backtype/storm/hooks/info/BoltAckInfo.java | 32 +
.../storm/hooks/info/BoltExecuteInfo.java | 32 +
.../backtype/storm/hooks/info/BoltFailInfo.java | 32 +
.../backtype/storm/hooks/info/EmitInfo.java | 35 +
.../backtype/storm/hooks/info/SpoutAckInfo.java | 30 +
.../storm/hooks/info/SpoutFailInfo.java | 30 +
.../storm/messaging/ConnectionWithStatus.java | 49 +
.../backtype/storm/messaging/IConnection.java | 54 +
.../java/backtype/storm/messaging/IContext.java | 68 +
.../backtype/storm/messaging/TaskMessage.java | 68 +
.../storm/messaging/TransportFactory.java | 68 +
.../storm/metric/LoggingMetricsConsumer.java | 72 +
.../storm/metric/MetricsConsumerBolt.java | 64 +
.../java/backtype/storm/metric/SystemBolt.java | 155 +
.../storm/metric/api/AssignableMetric.java | 34 +
.../storm/metric/api/CombinedMetric.java | 38 +
.../backtype/storm/metric/api/CountMetric.java | 41 +
.../backtype/storm/metric/api/ICombiner.java | 23 +
.../java/backtype/storm/metric/api/IMetric.java | 22 +
.../storm/metric/api/IMetricsConsumer.java | 60 +
.../backtype/storm/metric/api/IReducer.java | 24 +
.../storm/metric/api/IStatefulObject.java | 22 +
.../backtype/storm/metric/api/MeanReducer.java | 55 +
.../storm/metric/api/MultiCountMetric.java | 45 +
.../storm/metric/api/MultiReducedMetric.java | 50 +
.../storm/metric/api/ReducedMetric.java | 38 +
.../backtype/storm/metric/api/StateMetric.java | 31 +
.../metric/api/rpc/AssignableShellMetric.java | 30 +
.../metric/api/rpc/CombinedShellMetric.java | 31 +
.../storm/metric/api/rpc/CountShellMetric.java | 38 +
.../storm/metric/api/rpc/IShellMetric.java | 31 +
.../metric/api/rpc/ReducedShellMetric.java | 32 +
.../java/backtype/storm/multilang/BoltMsg.java | 80 +
.../backtype/storm/multilang/ISerializer.java | 82 +
.../storm/multilang/JsonSerializer.java | 202 +
.../storm/multilang/NoOutputException.java | 40 +
.../java/backtype/storm/multilang/ShellMsg.java | 185 +
.../java/backtype/storm/multilang/SpoutMsg.java | 51 +
.../storm/nimbus/DefaultTopologyValidator.java | 31 +
.../storm/nimbus/ITopologyValidator.java | 28 +
.../backtype/storm/planner/CompoundSpout.java | 25 +
.../backtype/storm/planner/CompoundTask.java | 25 +
.../java/backtype/storm/planner/TaskBundle.java | 33 +
.../java/backtype/storm/scheduler/Cluster.java | 448 +
.../storm/scheduler/ExecutorDetails.java | 54 +
.../java/backtype/storm/scheduler/INimbus.java | 39 +
.../backtype/storm/scheduler/IScheduler.java | 40 +
.../backtype/storm/scheduler/ISupervisor.java | 45 +
.../storm/scheduler/SchedulerAssignment.java | 58 +
.../scheduler/SchedulerAssignmentImpl.java | 110 +
.../storm/scheduler/SupervisorDetails.java | 89 +
.../backtype/storm/scheduler/Topologies.java | 57 +
.../storm/scheduler/TopologyDetails.java | 89 +
.../backtype/storm/scheduler/WorkerSlot.java | 104 +
.../scheduler/multitenant/DefaultPool.java | 219 +
.../storm/scheduler/multitenant/FreePool.java | 125 +
.../scheduler/multitenant/IsolatedPool.java | 346 +
.../multitenant/MultitenantScheduler.java | 98 +
.../storm/scheduler/multitenant/Node.java | 343 +
.../storm/scheduler/multitenant/NodePool.java | 296 +
.../storm/security/INimbusCredentialPlugin.java | 47 +
.../backtype/storm/security/auth/AuthUtils.java | 284 +
.../auth/DefaultHttpCredentialsPlugin.java | 96 +
.../security/auth/DefaultPrincipalToLocal.java | 43 +
.../storm/security/auth/IAuthorizer.java | 47 +
.../storm/security/auth/IAutoCredentials.java | 55 +
.../security/auth/ICredentialsRenewer.java | 41 +
.../auth/IGroupMappingServiceProvider.java | 42 +
.../security/auth/IHttpCredentialsPlugin.java | 50 +
.../storm/security/auth/IPrincipalToLocal.java | 41 +
.../storm/security/auth/ITransportPlugin.java | 61 +
.../security/auth/KerberosPrincipalToLocal.java | 45 +
.../storm/security/auth/ReqContext.java | 142 +
.../security/auth/SaslTransportPlugin.java | 176 +
.../security/auth/ShellBasedGroupsMapping.java | 94 +
.../security/auth/SimpleTransportPlugin.java | 163 +
.../security/auth/SingleUserPrincipal.java | 56 +
.../storm/security/auth/TBackoffConnect.java | 77 +
.../storm/security/auth/ThriftClient.java | 204 +
.../security/auth/ThriftConnectionType.java | 77 +
.../storm/security/auth/ThriftServer.java | 80 +
.../auth/authorizer/DRPCAuthorizerBase.java | 46 +
.../authorizer/DRPCSimpleACLAuthorizer.java | 157 +
.../auth/authorizer/DenyAuthorizer.java | 57 +
.../authorizer/ImpersonationAuthorizer.java | 154 +
.../auth/authorizer/NoopAuthorizer.java | 57 +
.../auth/authorizer/SimpleACLAuthorizer.java | 152 +
.../authorizer/SimpleWhitelistAuthorizer.java | 70 +
.../auth/digest/ClientCallbackHandler.java | 110 +
.../auth/digest/DigestSaslTransportPlugin.java | 70 +
.../auth/digest/ServerCallbackHandler.java | 131 +
.../storm/security/auth/kerberos/AutoTGT.java | 281 +
.../auth/kerberos/AutoTGTKrb5LoginModule.java | 108 +
.../kerberos/AutoTGTKrb5LoginModuleTest.java | 44 +
.../auth/kerberos/ClientCallbackHandler.java | 104 +
.../kerberos/KerberosSaslTransportPlugin.java | 209 +
.../auth/kerberos/ServerCallbackHandler.java | 94 +
.../auth/kerberos/jaas_kerberos_cluster.conf | 31 +
.../auth/kerberos/jaas_kerberos_launcher.conf | 12 +
.../serialization/BlowfishTupleSerializer.java | 90 +
.../storm/serialization/DefaultKryoFactory.java | 64 +
.../DefaultSerializationDelegate.java | 58 +
.../GzipBridgeSerializationDelegate.java | 65 +
.../GzipBridgeThriftSerializationDelegate.java | 64 +
.../GzipSerializationDelegate.java | 64 +
.../GzipThriftSerializationDelegate.java | 57 +
.../storm/serialization/IKryoDecorator.java | 23 +
.../storm/serialization/IKryoFactory.java | 40 +
.../storm/serialization/ITupleDeserializer.java | 25 +
.../storm/serialization/ITupleSerializer.java | 26 +
.../serialization/KryoTupleDeserializer.java | 129 +
.../serialization/KryoTupleSerializer.java | 112 +
.../serialization/KryoValuesDeserializer.java | 51 +
.../serialization/KryoValuesSerializer.java | 58 +
.../serialization/SerializableSerializer.java | 62 +
.../serialization/SerializationDelegate.java | 35 +
.../serialization/SerializationFactory.java | 226 +
.../ThriftSerializationDelegate.java | 52 +
.../types/ArrayListSerializer.java | 32 +
.../serialization/types/HashMapSerializer.java | 32 +
.../serialization/types/HashSetSerializer.java | 32 +
.../types/ListDelegateSerializer.java | 32 +
.../storm/spout/IMultiSchemableSpout.java | 23 +
.../backtype/storm/spout/ISchemableSpout.java | 24 +
.../main/java/backtype/storm/spout/ISpout.java | 105 +
.../storm/spout/ISpoutOutputCollector.java | 30 +
.../storm/spout/ISpoutWaitStrategy.java | 34 +
.../java/backtype/storm/spout/MultiScheme.java | 28 +
.../storm/spout/NothingEmptyEmitStrategy.java | 31 +
.../backtype/storm/spout/RawMultiScheme.java | 38 +
.../java/backtype/storm/spout/RawScheme.java | 32 +
.../main/java/backtype/storm/spout/Scheme.java | 28 +
.../storm/spout/SchemeAsMultiScheme.java | 41 +
.../java/backtype/storm/spout/ShellSpout.java | 266 +
.../storm/spout/SleepSpoutWaitStrategy.java | 41 +
.../storm/spout/SpoutOutputCollector.java | 134 +
.../java/backtype/storm/state/IStateSpout.java | 29 +
.../storm/state/IStateSpoutOutputCollector.java | 22 +
.../backtype/storm/state/ISubscribedState.java | 25 +
.../state/ISynchronizeOutputCollector.java | 24 +
.../storm/state/StateSpoutOutputCollector.java | 28 +
.../storm/state/SynchronizeOutputCollector.java | 30 +
.../storm/task/GeneralTopologyContext.java | 209 +
.../main/java/backtype/storm/task/IBolt.java | 84 +
.../backtype/storm/task/IErrorReporter.java | 22 +
.../backtype/storm/task/IMetricsContext.java | 31 +
.../backtype/storm/task/IOutputCollector.java | 32 +
.../backtype/storm/task/OutputCollector.java | 225 +
.../java/backtype/storm/task/ShellBolt.java | 386 +
.../backtype/storm/task/TopologyContext.java | 360 +
.../storm/task/WorkerTopologyContext.java | 109 +
.../backtype/storm/testing/AckFailDelegate.java | 25 +
.../storm/testing/AckFailMapTracker.java | 52 +
.../java/backtype/storm/testing/AckTracker.java | 52 +
.../backtype/storm/testing/BatchNumberList.java | 72 +
.../storm/testing/BatchProcessWord.java | 39 +
.../backtype/storm/testing/BatchRepeatA.java | 48 +
.../backtype/storm/testing/BoltTracker.java | 42 +
.../storm/testing/CompleteTopologyParam.java | 87 +
.../storm/testing/CountingBatchBolt.java | 55 +
.../storm/testing/CountingCommitBolt.java | 57 +
.../backtype/storm/testing/FeederSpout.java | 100 +
.../java/backtype/storm/testing/FixedTuple.java | 42 +
.../backtype/storm/testing/FixedTupleSpout.java | 179 +
.../testing/ForwardingMetricsConsumer.java | 95 +
.../backtype/storm/testing/IdentityBolt.java | 42 +
.../storm/testing/KeyedCountingBatchBolt.java | 61 +
.../testing/KeyedCountingCommitterBolt.java | 24 +
.../storm/testing/KeyedSummingBatchBolt.java | 61 +
.../storm/testing/MemoryTransactionalSpout.java | 181 +
.../testing/MemoryTransactionalSpoutMeta.java | 38 +
.../backtype/storm/testing/MkClusterParam.java | 57 +
.../backtype/storm/testing/MkTupleParam.java | 51 +
.../backtype/storm/testing/MockedSources.java | 60 +
.../java/backtype/storm/testing/NGrouping.java | 50 +
.../storm/testing/NonRichBoltTracker.java | 51 +
.../testing/OpaqueMemoryTransactionalSpout.java | 186 +
.../storm/testing/PrepareBatchBolt.java | 52 +
.../storm/testing/PythonShellMetricsBolt.java | 49 +
.../storm/testing/PythonShellMetricsSpout.java | 52 +
.../testing/SingleUserSimpleTransport.java | 37 +
.../backtype/storm/testing/SpoutTracker.java | 105 +
.../storm/testing/TestAggregatesCounter.java | 63 +
.../backtype/storm/testing/TestConfBolt.java | 62 +
.../storm/testing/TestEventLogSpout.java | 139 +
.../storm/testing/TestEventOrderCheckBolt.java | 76 +
.../backtype/storm/testing/TestGlobalCount.java | 60 +
.../java/backtype/storm/testing/TestJob.java | 41 +
.../storm/testing/TestKryoDecorator.java | 31 +
.../backtype/storm/testing/TestPlannerBolt.java | 45 +
.../storm/testing/TestPlannerSpout.java | 85 +
.../backtype/storm/testing/TestSerObject.java | 56 +
.../backtype/storm/testing/TestWordCounter.java | 61 +
.../backtype/storm/testing/TestWordSpout.java | 86 +
.../backtype/storm/testing/TrackedTopology.java | 34 +
.../storm/testing/TupleCaptureBolt.java | 83 +
.../topology/BaseConfigurationDeclarer.java | 54 +
.../storm/topology/BasicBoltExecutor.java | 67 +
.../storm/topology/BasicOutputCollector.java | 62 +
.../backtype/storm/topology/BoltDeclarer.java | 26 +
.../ComponentConfigurationDeclarer.java | 29 +
.../storm/topology/FailedException.java | 36 +
.../backtype/storm/topology/IBasicBolt.java | 33 +
.../storm/topology/IBasicOutputCollector.java | 26 +
.../backtype/storm/topology/IComponent.java | 44 +
.../java/backtype/storm/topology/IConfig.java | 28 +
.../java/backtype/storm/topology/IRichBolt.java | 29 +
.../backtype/storm/topology/IRichSpout.java | 29 +
.../storm/topology/IRichStateSpout.java | 25 +
.../backtype/storm/topology/InputDeclarer.java | 187 +
.../storm/topology/OutputFieldsDeclarer.java | 32 +
.../storm/topology/OutputFieldsGetter.java | 53 +
.../storm/topology/ReportedFailedException.java | 36 +
.../backtype/storm/topology/SpoutDeclarer.java | 22 +
.../storm/topology/TopologyBuilder.java | 384 +
.../storm/topology/base/BaseBasicBolt.java | 33 +
.../storm/topology/base/BaseBatchBolt.java | 25 +
.../storm/topology/base/BaseComponent.java | 28 +
...BaseOpaquePartitionedTransactionalSpout.java | 25 +
.../base/BasePartitionedTransactionalSpout.java | 25 +
.../storm/topology/base/BaseRichBolt.java | 26 +
.../storm/topology/base/BaseRichSpout.java | 50 +
.../topology/base/BaseTransactionalBolt.java | 24 +
.../topology/base/BaseTransactionalSpout.java | 25 +
.../storm/transactional/ICommitter.java | 26 +
.../ICommitterTransactionalSpout.java | 31 +
.../transactional/ITransactionalSpout.java | 94 +
.../storm/transactional/TransactionAttempt.java | 61 +
.../TransactionalSpoutBatchExecutor.java | 96 +
.../TransactionalSpoutCoordinator.java | 217 +
.../TransactionalTopologyBuilder.java | 553 +
.../IOpaquePartitionedTransactionalSpout.java | 56 +
.../IPartitionedTransactionalSpout.java | 69 +
...uePartitionedTransactionalSpoutExecutor.java | 159 +
.../PartitionedTransactionalSpoutExecutor.java | 142 +
.../state/RotatingTransactionalState.java | 151 +
.../state/TestTransactionalState.java | 47 +
.../transactional/state/TransactionalState.java | 173 +
.../java/backtype/storm/tuple/BatchTuple.java | 77 +
.../main/java/backtype/storm/tuple/Fields.java | 100 +
.../main/java/backtype/storm/tuple/ITuple.java | 136 +
.../java/backtype/storm/tuple/MessageId.java | 103 +
.../main/java/backtype/storm/tuple/Tuple.java | 60 +
.../java/backtype/storm/tuple/TupleExt.java | 29 +
.../java/backtype/storm/tuple/TupleImpl.java | 351 +
.../java/backtype/storm/tuple/TupleImplExt.java | 46 +
.../main/java/backtype/storm/tuple/Values.java | 37 +
.../storm/ui/InvalidRequestException.java | 37 +
.../storm/utils/BufferFileInputStream.java | 54 +
.../backtype/storm/utils/CRC32OutputStream.java | 44 +
.../backtype/storm/utils/ClojureTimerTask.java | 35 +
.../java/backtype/storm/utils/Container.java | 24 +
.../java/backtype/storm/utils/DRPCClient.java | 65 +
.../backtype/storm/utils/DisruptorQueue.java | 80 +
.../storm/utils/DisruptorQueueImpl.java | 306 +
.../storm/utils/DisruptorWrapBlockingQueue.java | 200 +
.../storm/utils/ExtendedThreadPoolExecutor.java | 67 +
.../storm/utils/IndifferentAccessMap.java | 177 +
.../backtype/storm/utils/InprocMessaging.java | 59 +
.../storm/utils/KeyedRoundRobinQueue.java | 68 +
.../java/backtype/storm/utils/ListDelegate.java | 156 +
.../java/backtype/storm/utils/LocalState.java | 99 +
.../main/java/backtype/storm/utils/Monitor.java | 268 +
.../java/backtype/storm/utils/MutableInt.java | 43 +
.../java/backtype/storm/utils/MutableLong.java | 43 +
.../backtype/storm/utils/MutableObject.java | 38 +
.../java/backtype/storm/utils/NimbusClient.java | 107 +
.../storm/utils/RegisteredGlobalState.java | 64 +
.../java/backtype/storm/utils/RotatingMap.java | 125 +
.../backtype/storm/utils/ServiceRegistry.java | 47 +
.../java/backtype/storm/utils/ShellProcess.java | 189 +
.../java/backtype/storm/utils/ShellUtils.java | 498 +
.../StormBoundedExponentialBackoffRetry.java | 74 +
.../java/backtype/storm/utils/TestUtils.java | 34 +
.../storm/utils/ThreadResourceManager.java | 45 +
.../storm/utils/ThriftTopologyUtils.java | 69 +
.../main/java/backtype/storm/utils/Time.java | 109 +
.../java/backtype/storm/utils/TimeCacheMap.java | 169 +
.../backtype/storm/utils/TransferDrainer.java | 113 +
.../java/backtype/storm/utils/TupleHelpers.java | 31 +
.../java/backtype/storm/utils/TupleUtils.java | 35 +
.../main/java/backtype/storm/utils/Utils.java | 906 +
.../java/backtype/storm/utils/VersionInfo.java | 131 +
.../backtype/storm/utils/VersionedStore.java | 187 +
.../storm/utils/WindowedTimeThrottler.java | 51 +
.../backtype/storm/utils/WorkerClassLoader.java | 188 +
.../backtype/storm/utils/WritableUtils.java | 375 +
.../backtype/storm/utils/ZookeeperAuthInfo.java | 53 +
.../storm/utils/ZookeeperServerCnxnFactory.java | 84 +
.../utils/disruptor/AbstractSequencerExt.java | 38 +
.../utils/disruptor/MultiProducerSequencer.java | 267 +
.../storm/utils/disruptor/RingBuffer.java | 996 +
.../disruptor/SingleProducerSequencer.java | 197 +
.../java/com/alibaba/jstorm/batch/BatchId.java | 83 +
.../jstorm/batch/BatchTopologyBuilder.java | 88 +
.../com/alibaba/jstorm/batch/IBatchSpout.java | 55 +
.../com/alibaba/jstorm/batch/ICommitter.java | 52 +
.../com/alibaba/jstorm/batch/IPostCommit.java | 29 +
.../alibaba/jstorm/batch/IPrepareCommit.java | 38 +
.../jstorm/batch/impl/BatchSpoutMsgId.java | 65 +
.../jstorm/batch/impl/BatchSpoutTrigger.java | 333 +
.../jstorm/batch/impl/CoordinatedBolt.java | 300 +
.../alibaba/jstorm/batch/util/BatchCommon.java | 89 +
.../com/alibaba/jstorm/batch/util/BatchDef.java | 41 +
.../alibaba/jstorm/batch/util/BatchStatus.java | 43 +
.../com/alibaba/jstorm/cache/JStormCache.java | 49 +
.../com/alibaba/jstorm/cache/RocksDBCache.java | 311 +
.../alibaba/jstorm/cache/RocksTTLDBCache.java | 469 +
.../alibaba/jstorm/cache/TimeoutMemCache.java | 178 +
.../jstorm/callback/AsyncLoopDefaultKill.java | 42 +
.../jstorm/callback/AsyncLoopRunnable.java | 127 +
.../jstorm/callback/AsyncLoopThread.java | 140 +
.../alibaba/jstorm/callback/BaseCallback.java | 28 +
.../com/alibaba/jstorm/callback/Callback.java | 30 +
.../jstorm/callback/ClusterStateCallback.java | 22 +
.../jstorm/callback/DefaultWatcherCallBack.java | 45 +
.../jstorm/callback/RunnableCallback.java | 64 +
.../jstorm/callback/WatcherCallBack.java | 25 +
.../callback/impl/ActiveTransitionCallback.java | 36 +
.../impl/DelayStatusTransitionCallback.java | 111 +
.../impl/DoRebalanceTransitionCallback.java | 307 +
.../impl/DoneRebalanceTransitionCallback.java | 22 +
.../impl/InactiveTransitionCallback.java | 38 +
.../callback/impl/KillTransitionCallback.java | 38 +
.../impl/ReassignTransitionCallback.java | 66 +
.../impl/RebalanceTransitionCallback.java | 42 +
.../callback/impl/RemoveTransitionCallback.java | 71 +
.../impl/UpdateConfTransitionCallback.java | 79 +
.../alibaba/jstorm/client/ConfigExtension.java | 867 +
.../alibaba/jstorm/client/WorkerAssignment.java | 283 +
.../jstorm/client/spout/IAckValueSpout.java | 32 +
.../jstorm/client/spout/IFailValueSpout.java | 32 +
.../com/alibaba/jstorm/cluster/Cluster.java | 398 +
.../alibaba/jstorm/cluster/ClusterState.java | 56 +
.../java/com/alibaba/jstorm/cluster/Common.java | 845 +
.../alibaba/jstorm/cluster/DaemonCommon.java | 22 +
.../jstorm/cluster/DistributedClusterState.java | 242 +
.../com/alibaba/jstorm/cluster/StormBase.java | 144 +
.../jstorm/cluster/StormClusterState.java | 162 +
.../com/alibaba/jstorm/cluster/StormConfig.java | 549 +
.../alibaba/jstorm/cluster/StormMonitor.java | 50 +
.../com/alibaba/jstorm/cluster/StormStatus.java | 123 +
.../jstorm/cluster/StormZkClusterState.java | 759 +
.../alibaba/jstorm/common/metric/Counter.java | 55 +
.../com/alibaba/jstorm/common/metric/Gauge.java | 61 +
.../alibaba/jstorm/common/metric/Histogram.java | 104 +
.../jstorm/common/metric/Histogram.java.bak | 71 +
.../jstorm/common/metric/LongCounter.java | 39 +
.../com/alibaba/jstorm/common/metric/Meter.java | 50 +
.../jstorm/common/metric/MetricFilter.java | 46 +
.../jstorm/common/metric/MetricRegistry.java | 316 +
.../alibaba/jstorm/common/metric/MetricSet.java | 28 +
.../jstorm/common/metric/QueueGauge.java | 61 +
.../com/alibaba/jstorm/common/metric/Timer.java | 108 +
.../jstorm/common/metric/TimerRatio.java | 73 +
.../com/alibaba/jstorm/common/metric/Top.java | 157 +
.../jstorm/common/metric/operator/Sampling.java | 38 +
.../common/metric/operator/StartTime.java | 22 +
.../operator/convert/AtomicLongToLong.java | 35 +
.../metric/operator/convert/Convertor.java | 25 +
.../operator/convert/DefaultConvertor.java | 29 +
.../metric/operator/convert/SetToList.java | 39 +
.../metric/operator/merger/AvgMerger.java | 52 +
.../metric/operator/merger/AvgMerger.java.bak | 53 +
.../metric/operator/merger/LongSumMerger.java | 43 +
.../common/metric/operator/merger/Merger.java | 25 +
.../metric/operator/merger/SumMerger.java | 39 +
.../metric/operator/merger/TpsMerger.java | 65 +
.../metric/operator/updater/AddUpdater.java | 38 +
.../metric/operator/updater/AvgUpdater.java | 62 +
.../metric/operator/updater/AvgUpdater.java.bak | 73 +
.../operator/updater/DoubleAddUpdater.java | 45 +
.../metric/operator/updater/LongAddUpdater.java | 45 +
.../common/metric/operator/updater/Updater.java | 25 +
.../jstorm/common/metric/window/AllWindow.java | 78 +
.../jstorm/common/metric/window/Metric.java | 231 +
.../common/metric/window/RollingWindow.java | 194 +
.../common/metric/window/StatBuckets.java | 153 +
.../jstorm/common/stats/StaticsType.java | 22 +
.../alibaba/jstorm/container/CgroupCenter.java | 224 +
.../jstorm/container/CgroupOperation.java | 46 +
.../alibaba/jstorm/container/CgroupUtils.java | 178 +
.../com/alibaba/jstorm/container/Constants.java | 30 +
.../com/alibaba/jstorm/container/Hierarchy.java | 109 +
.../com/alibaba/jstorm/container/SubSystem.java | 70 +
.../alibaba/jstorm/container/SubSystemType.java | 48 +
.../jstorm/container/cgroup/CgroupCommon.java | 249 +
.../container/cgroup/CgroupCommonOperation.java | 48 +
.../container/cgroup/CgroupCoreFactory.java | 78 +
.../alibaba/jstorm/container/cgroup/Device.java | 68 +
.../jstorm/container/cgroup/core/BlkioCore.java | 306 +
.../container/cgroup/core/CgroupCore.java | 26 +
.../jstorm/container/cgroup/core/CpuCore.java | 148 +
.../container/cgroup/core/CpuacctCore.java | 79 +
.../container/cgroup/core/CpusetCore.java | 274 +
.../container/cgroup/core/DevicesCore.java | 188 +
.../container/cgroup/core/FreezerCore.java | 67 +
.../container/cgroup/core/MemoryCore.java | 229 +
.../container/cgroup/core/NetClsCore.java | 75 +
.../container/cgroup/core/NetPrioCore.java | 72 +
.../jstorm/daemon/nimbus/DefaultInimbus.java | 72 +
.../jstorm/daemon/nimbus/NimbusCache.java | 146 +
.../jstorm/daemon/nimbus/NimbusData.java | 327 +
.../jstorm/daemon/nimbus/NimbusServer.java | 417 +
.../jstorm/daemon/nimbus/NimbusUtils.java | 723 +
.../jstorm/daemon/nimbus/ServiceHandler.java | 1523 +
.../jstorm/daemon/nimbus/StatusTransition.java | 294 +
.../jstorm/daemon/nimbus/StatusType.java | 63 +
.../jstorm/daemon/nimbus/TopologyAssign.java | 996 +
.../daemon/nimbus/TopologyAssignEvent.java | 117 +
.../daemon/nimbus/TopologyMetricsRunnable.java | 727 +
.../jstorm/daemon/nimbus/TopologyNettyMgr.java | 105 +
.../jstorm/daemon/supervisor/CgroupManager.java | 160 +
.../jstorm/daemon/supervisor/Heartbeat.java | 155 +
.../jstorm/daemon/supervisor/Httpserver.java | 482 +
.../jstorm/daemon/supervisor/SandBoxMaker.java | 221 +
.../jstorm/daemon/supervisor/ShutdownWork.java | 193 +
.../daemon/supervisor/StateHeartbeat.java | 53 +
.../jstorm/daemon/supervisor/Supervisor.java | 247 +
.../daemon/supervisor/SupervisorDaemon.java | 30 +
.../daemon/supervisor/SupervisorInfo.java | 186 +
.../daemon/supervisor/SupervisorManger.java | 197 +
.../daemon/supervisor/SyncProcessEvent.java | 1057 +
.../daemon/supervisor/SyncSupervisorEvent.java | 618 +
.../daemon/worker/BatchDrainerRunable.java | 60 +
.../jstorm/daemon/worker/ContextMaker.java | 128 +
.../jstorm/daemon/worker/DrainerRunable.java | 139 +
.../jstorm/daemon/worker/LocalAssignment.java | 162 +
.../jstorm/daemon/worker/ProcessSimulator.java | 100 +
.../jstorm/daemon/worker/RefreshActive.java | 139 +
.../daemon/worker/RefreshConnections.java | 368 +
.../daemon/worker/ShutdownableDameon.java | 27 +
.../com/alibaba/jstorm/daemon/worker/State.java | 28 +
.../daemon/worker/VirtualPortDispatch.java | 99 +
.../alibaba/jstorm/daemon/worker/Worker.java | 492 +
.../jstorm/daemon/worker/WorkerData.java | 615 +
.../jstorm/daemon/worker/WorkerHeartbeat.java | 89 +
.../jstorm/daemon/worker/WorkerShutdown.java | 186 +
.../daemon/worker/hearbeat/SyncContainerHb.java | 386 +
.../worker/hearbeat/WorkerHeartbeatRunable.java | 127 +
.../daemon/worker/timer/RotatingMapTrigger.java | 86 +
.../worker/timer/TaskBatchFlushTrigger.java | 50 +
.../worker/timer/TaskHeartbeatTrigger.java | 84 +
.../daemon/worker/timer/TickTupleTrigger.java | 62 +
.../daemon/worker/timer/TimerConstants.java | 26 +
.../daemon/worker/timer/TimerTrigger.java | 169 +
.../com/alibaba/jstorm/drpc/ClearThread.java | 79 +
.../main/java/com/alibaba/jstorm/drpc/Drpc.java | 319 +
.../com/alibaba/jstorm/event/EventManager.java | 28 +
.../alibaba/jstorm/event/EventManagerImp.java | 84 +
.../jstorm/event/EventManagerPusher.java | 50 +
.../jstorm/event/EventManagerZkPusher.java | 42 +
.../jstorm/message/netty/ControlMessage.java | 86 +
.../jstorm/message/netty/MessageBatch.java | 194 +
.../jstorm/message/netty/MessageDecoder.java | 236 +
.../jstorm/message/netty/MessageEncoder.java | 40 +
.../jstorm/message/netty/NettyClient.java | 614 +
.../jstorm/message/netty/NettyClientAsync.java | 350 +
.../jstorm/message/netty/NettyClientSync.java | 332 +
.../jstorm/message/netty/NettyConnection.java | 91 +
.../jstorm/message/netty/NettyContext.java | 150 +
.../message/netty/NettyRenameThreadFactory.java | 55 +
.../jstorm/message/netty/NettyServer.java | 234 +
.../jstorm/message/netty/ReconnectRunnable.java | 72 +
.../message/netty/StormClientHandler.java | 107 +
.../netty/StormClientPipelineFactory.java | 51 +
.../message/netty/StormServerHandler.java | 140 +
.../netty/StormServerPipelineFactory.java | 48 +
.../alibaba/jstorm/metric/AlimonitorClient.java | 267 +
.../jstorm/metric/JStormHealthCheck.java | 79 +
.../alibaba/jstorm/metric/JStormMetrics.java | 281 +
.../jstorm/metric/JStormMetricsReporter.java | 406 +
.../com/alibaba/jstorm/metric/MetricDef.java | 163 +
.../com/alibaba/jstorm/metric/MetricJstack.java | 140 +
.../alibaba/jstorm/metric/MetricSendClient.java | 18 +
.../com/alibaba/jstorm/metric/MetricThrift.java | 129 +
.../jstorm/metric/SimpleJStormMetric.java | 96 +
.../jstorm/queue/disruptor/JstormEvent.java | 41 +
.../queue/disruptor/JstormEventHandler.java | 47 +
.../jstorm/queue/disruptor/JstormProducer.java | 63 +
.../com/alibaba/jstorm/schedule/Assignment.java | 232 +
.../alibaba/jstorm/schedule/AssignmentBak.java | 55 +
.../alibaba/jstorm/schedule/CleanRunnable.java | 83 +
.../jstorm/schedule/DelayEventRunnable.java | 44 +
.../jstorm/schedule/FollowerRunnable.java | 306 +
.../jstorm/schedule/IToplogyScheduler.java | 31 +
.../jstorm/schedule/MonitorRunnable.java | 125 +
.../jstorm/schedule/TopologyAssignContext.java | 179 +
.../DefaultTopologyAssignContext.java | 221 +
.../DefaultTopologyScheduler.java | 190 +
.../default_assign/ResourceWorkerSlot.java | 162 +
.../Selector/AbstractSelector.java | 71 +
.../Selector/ComponentNumSelector.java | 56 +
.../Selector/InputComponentNumSelector.java | 55 +
.../default_assign/Selector/Selector.java | 27 +
.../Selector/TotalTaskNumSelector.java | 52 +
.../Selector/WorkerComparator.java | 33 +
.../default_assign/TaskAssignContext.java | 133 +
.../schedule/default_assign/TaskScheduler.java | 334 +
.../default_assign/WorkerScheduler.java | 387 +
.../main/java/com/alibaba/jstorm/task/Task.java | 370 +
.../com/alibaba/jstorm/task/TaskBaseMetric.java | 132 +
.../alibaba/jstorm/task/TaskBatchReceiver.java | 99 +
.../alibaba/jstorm/task/TaskBatchTransfer.java | 141 +
.../java/com/alibaba/jstorm/task/TaskInfo.java | 78 +
.../com/alibaba/jstorm/task/TaskReceiver.java | 228 +
.../alibaba/jstorm/task/TaskShutdownDameon.java | 188 +
.../com/alibaba/jstorm/task/TaskStatus.java | 58 +
.../com/alibaba/jstorm/task/TaskTransfer.java | 216 +
.../com/alibaba/jstorm/task/TkHbCacheTime.java | 65 +
.../com/alibaba/jstorm/task/UptimeComputer.java | 38 +
.../alibaba/jstorm/task/acker/AckObject.java | 36 +
.../com/alibaba/jstorm/task/acker/Acker.java | 164 +
.../jstorm/task/comm/TaskSendTargets.java | 151 +
.../com/alibaba/jstorm/task/comm/TupleInfo.java | 73 +
.../jstorm/task/comm/UnanchoredSend.java | 54 +
.../jstorm/task/error/ITaskReportErr.java | 28 +
.../alibaba/jstorm/task/error/TaskError.java | 55 +
.../jstorm/task/error/TaskErrorRunable.java | 48 +
.../jstorm/task/error/TaskReportError.java | 59 +
.../task/error/TaskReportErrorAndDie.java | 43 +
.../jstorm/task/execute/BaseExecutors.java | 232 +
.../jstorm/task/execute/BoltCollector.java | 259 +
.../jstorm/task/execute/BoltExecutors.java | 290 +
.../jstorm/task/execute/spout/AckSpoutMsg.java | 79 +
.../jstorm/task/execute/spout/FailSpoutMsg.java | 71 +
.../jstorm/task/execute/spout/IAckMsg.java | 22 +
.../spout/MultipleThreadSpoutExecutors.java | 131 +
.../spout/SingleThreadSpoutExecutors.java | 96 +
.../task/execute/spout/SpoutCollector.java | 218 +
.../task/execute/spout/SpoutExecutors.java | 357 +
.../execute/spout/SpoutTimeoutCallBack.java | 74 +
.../alibaba/jstorm/task/group/GrouperType.java | 28 +
.../jstorm/task/group/MkCustomGrouper.java | 49 +
.../jstorm/task/group/MkFieldsGrouper.java | 60 +
.../alibaba/jstorm/task/group/MkGrouper.java | 203 +
.../alibaba/jstorm/task/group/MkLocalFirst.java | 144 +
.../jstorm/task/group/MkLocalShuffer.java | 73 +
.../alibaba/jstorm/task/group/MkShuffer.java | 55 +
.../com/alibaba/jstorm/task/group/Shuffer.java | 50 +
.../jstorm/task/heartbeat/TaskHeartbeat.java | 98 +
.../task/heartbeat/TaskHeartbeatRunable.java | 191 +
.../jstorm/task/heartbeat/TaskStats.java | 38 +
.../alibaba/jstorm/utils/DisruptorQueue.java | 168 +
.../alibaba/jstorm/utils/DisruptorRunable.java | 113 +
.../com/alibaba/jstorm/utils/EPlatform.java | 50 +
.../com/alibaba/jstorm/utils/EventSampler.java | 116 +
.../alibaba/jstorm/utils/ExpiredCallback.java | 22 +
.../utils/FailedAssignTopologyException.java | 39 +
.../com/alibaba/jstorm/utils/FileAttribute.java | 135 +
.../alibaba/jstorm/utils/HttpserverUtils.java | 46 +
.../com/alibaba/jstorm/utils/IntervalCheck.java | 83 +
.../alibaba/jstorm/utils/JStormServerUtils.java | 153 +
.../com/alibaba/jstorm/utils/JStormUtils.java | 1279 +
.../java/com/alibaba/jstorm/utils/LoadConf.java | 141 +
.../com/alibaba/jstorm/utils/NetWorkUtils.java | 180 +
.../java/com/alibaba/jstorm/utils/OSInfo.java | 157 +
.../alibaba/jstorm/utils/OlderFileFilter.java | 46 +
.../java/com/alibaba/jstorm/utils/Pair.java | 53 +
.../com/alibaba/jstorm/utils/PathUtils.java | 148 +
.../com/alibaba/jstorm/utils/RandomRange.java | 74 +
.../com/alibaba/jstorm/utils/RotatingMap.java | 149 +
.../com/alibaba/jstorm/utils/RunCounter.java | 100 +
.../com/alibaba/jstorm/utils/SmartThread.java | 35 +
.../alibaba/jstorm/utils/SystemOperation.java | 76 +
.../java/com/alibaba/jstorm/utils/Thrift.java | 501 +
.../com/alibaba/jstorm/utils/TimeCacheMap.java | 173 +
.../alibaba/jstorm/utils/TimeCacheQueue.java | 186 +
.../com/alibaba/jstorm/utils/TimeFormat.java | 219 +
.../com/alibaba/jstorm/utils/TimeOutMap.java | 34 +
.../com/alibaba/jstorm/utils/TimeUtils.java | 51 +
.../java/com/alibaba/jstorm/zk/Factory.java | 37 +
.../com/alibaba/jstorm/zk/ZkCreateModes.java | 38 +
.../com/alibaba/jstorm/zk/ZkEventTypes.java | 44 +
.../com/alibaba/jstorm/zk/ZkKeeperStates.java | 41 +
.../main/java/com/alibaba/jstorm/zk/ZkTool.java | 218 +
.../java/com/alibaba/jstorm/zk/Zookeeper.java | 237 +
.../src/main/java/storm/trident/JoinType.java | 30 +
.../src/main/java/storm/trident/Stream.java | 377 +
.../main/java/storm/trident/TridentState.java | 40 +
.../java/storm/trident/TridentTopology.java | 815 +
.../trident/drpc/ReturnResultsReducer.java | 121 +
.../fluent/ChainedAggregatorDeclarer.java | 183 +
.../fluent/ChainedFullAggregatorDeclarer.java | 32 +
.../ChainedPartitionAggregatorDeclarer.java | 32 +
.../trident/fluent/GlobalAggregationScheme.java | 26 +
.../storm/trident/fluent/GroupedStream.java | 174 +
.../trident/fluent/IAggregatableStream.java | 31 +
.../fluent/IChainedAggregatorDeclarer.java | 24 +
.../java/storm/trident/fluent/UniqueIdGen.java | 34 +
.../java/storm/trident/graph/GraphGrouper.java | 123 +
.../main/java/storm/trident/graph/Group.java | 87 +
.../storm/trident/operation/Aggregator.java | 26 +
.../java/storm/trident/operation/Assembly.java | 25 +
.../storm/trident/operation/BaseAggregator.java | 23 +
.../storm/trident/operation/BaseFilter.java | 23 +
.../storm/trident/operation/BaseFunction.java | 23 +
.../trident/operation/BaseMultiReducer.java | 33 +
.../storm/trident/operation/BaseOperation.java | 32 +
.../trident/operation/CombinerAggregator.java | 29 +
.../storm/trident/operation/EachOperation.java | 22 +
.../java/storm/trident/operation/Filter.java | 25 +
.../java/storm/trident/operation/Function.java | 24 +
.../trident/operation/GroupedMultiReducer.java | 31 +
.../storm/trident/operation/MultiReducer.java | 31 +
.../java/storm/trident/operation/Operation.java | 26 +
.../trident/operation/ReducerAggregator.java | 26 +
.../trident/operation/TridentCollector.java | 26 +
.../operation/TridentMultiReducerContext.java | 36 +
.../operation/TridentOperationContext.java | 65 +
.../storm/trident/operation/builtin/Count.java | 41 +
.../storm/trident/operation/builtin/Debug.java | 39 +
.../storm/trident/operation/builtin/Equals.java | 38 +
.../trident/operation/builtin/FilterNull.java | 31 +
.../storm/trident/operation/builtin/FirstN.java | 125 +
.../storm/trident/operation/builtin/MapGet.java | 38 +
.../storm/trident/operation/builtin/Negate.java | 48 +
.../trident/operation/builtin/SnapshotGet.java | 44 +
.../storm/trident/operation/builtin/Sum.java | 42 +
.../operation/builtin/TupleCollectionGet.java | 46 +
.../operation/impl/CaptureCollector.java | 42 +
.../operation/impl/ChainedAggregatorImpl.java | 113 +
.../trident/operation/impl/ChainedResult.java | 53 +
.../operation/impl/CombinerAggStateUpdater.java | 56 +
.../impl/CombinerAggregatorCombineImpl.java | 61 +
.../impl/CombinerAggregatorInitImpl.java | 49 +
.../trident/operation/impl/FilterExecutor.java | 53 +
.../operation/impl/GlobalBatchToPartition.java | 29 +
.../trident/operation/impl/GroupCollector.java | 48 +
.../operation/impl/GroupedAggregator.java | 96 +
.../impl/GroupedMultiReducerExecutor.java | 95 +
.../operation/impl/IdentityMultiReducer.java | 51 +
.../impl/IndexHashBatchToPartition.java | 29 +
.../operation/impl/JoinerMultiReducer.java | 159 +
.../operation/impl/ReducerAggStateUpdater.java | 53 +
.../operation/impl/ReducerAggregatorImpl.java | 56 +
.../storm/trident/operation/impl/Result.java | 27 +
.../operation/impl/SingleEmitAggregator.java | 95 +
.../trident/operation/impl/TrueFilter.java | 40 +
.../storm/trident/partition/GlobalGrouping.java | 45 +
.../trident/partition/IdentityGrouping.java | 61 +
.../trident/partition/IndexHashGrouping.java | 53 +
.../storm/trident/planner/BridgeReceiver.java | 38 +
.../main/java/storm/trident/planner/Node.java | 64 +
.../storm/trident/planner/NodeStateInfo.java | 31 +
.../storm/trident/planner/PartitionNode.java | 52 +
.../storm/trident/planner/ProcessorContext.java | 29 +
.../storm/trident/planner/ProcessorNode.java | 33 +
.../java/storm/trident/planner/SpoutNode.java | 39 +
.../storm/trident/planner/SubtopologyBolt.java | 218 +
.../storm/trident/planner/TridentProcessor.java | 40 +
.../storm/trident/planner/TupleReceiver.java | 27 +
.../planner/processor/AggregateProcessor.java | 84 +
.../planner/processor/AppendCollector.java | 62 +
.../planner/processor/EachProcessor.java | 80 +
.../planner/processor/FreshCollector.java | 59 +
.../processor/MultiReducerProcessor.java | 93 +
.../processor/PartitionPersistProcessor.java | 107 +
.../planner/processor/ProjectedProcessor.java | 73 +
.../planner/processor/StateQueryProcessor.java | 106 +
.../planner/processor/TridentContext.java | 76 +
.../storm/trident/spout/BatchSpoutExecutor.java | 92 +
.../main/java/storm/trident/spout/IBatchID.java | 24 +
.../java/storm/trident/spout/IBatchSpout.java | 33 +
.../trident/spout/ICommitterTridentSpout.java | 31 +
.../spout/IOpaquePartitionedTridentSpout.java | 62 +
.../trident/spout/IPartitionedTridentSpout.java | 77 +
.../storm/trident/spout/ISpoutPartition.java | 25 +
.../java/storm/trident/spout/ITridentSpout.java | 94 +
.../OpaquePartitionedTridentSpoutExecutor.java | 201 +
.../spout/PartitionedTridentSpoutExecutor.java | 171 +
.../trident/spout/RichSpoutBatchExecutor.java | 199 +
.../storm/trident/spout/RichSpoutBatchId.java | 49 +
.../spout/RichSpoutBatchIdSerializer.java | 38 +
.../trident/spout/RichSpoutBatchTriggerer.java | 178 +
.../trident/spout/TridentSpoutCoordinator.java | 94 +
.../trident/spout/TridentSpoutExecutor.java | 138 +
.../storm/trident/state/BaseQueryFunction.java | 25 +
.../storm/trident/state/BaseStateUpdater.java | 25 +
.../trident/state/CombinerValueUpdater.java | 36 +
.../storm/trident/state/ITupleCollection.java | 26 +
.../state/JSONNonTransactionalSerializer.java | 44 +
.../trident/state/JSONOpaqueSerializer.java | 52 +
.../state/JSONTransactionalSerializer.java | 50 +
.../java/storm/trident/state/OpaqueValue.java | 75 +
.../java/storm/trident/state/QueryFunction.java | 28 +
.../java/storm/trident/state/ReadOnlyState.java | 31 +
.../trident/state/ReducerValueUpdater.java | 41 +
.../java/storm/trident/state/Serializer.java | 26 +
.../main/java/storm/trident/state/State.java | 39 +
.../java/storm/trident/state/StateFactory.java | 26 +
.../java/storm/trident/state/StateSpec.java | 30 +
.../java/storm/trident/state/StateType.java | 25 +
.../java/storm/trident/state/StateUpdater.java | 33 +
.../storm/trident/state/TransactionalValue.java | 44 +
.../java/storm/trident/state/ValueUpdater.java | 23 +
.../trident/state/map/CachedBatchReadsMap.java | 80 +
.../java/storm/trident/state/map/CachedMap.java | 79 +
.../storm/trident/state/map/IBackingMap.java | 26 +
.../state/map/MapCombinerAggStateUpdater.java | 83 +
.../state/map/MapReducerAggStateUpdater.java | 92 +
.../java/storm/trident/state/map/MapState.java | 26 +
.../state/map/MicroBatchIBackingMap.java | 85 +
.../trident/state/map/NonTransactionalMap.java | 67 +
.../java/storm/trident/state/map/OpaqueMap.java | 124 +
.../trident/state/map/ReadOnlyMapState.java | 26 +
.../trident/state/map/RemovableMapState.java | 25 +
.../trident/state/map/SnapshottableMap.java | 76 +
.../trident/state/map/TransactionalMap.java | 109 +
.../state/snapshot/ReadOnlySnapshottable.java | 24 +
.../trident/state/snapshot/Snapshottable.java | 27 +
.../trident/testing/CountAsAggregator.java | 47 +
.../storm/trident/testing/FeederBatchSpout.java | 185 +
.../testing/FeederCommitterBatchSpout.java | 96 +
.../storm/trident/testing/FixedBatchSpout.java | 97 +
.../java/storm/trident/testing/IFeeder.java | 23 +
.../trident/testing/LRUMemoryMapState.java | 154 +
.../storm/trident/testing/MemoryBackingMap.java | 47 +
.../storm/trident/testing/MemoryMapState.java | 176 +
.../main/java/storm/trident/testing/Split.java | 36 +
.../storm/trident/testing/StringLength.java | 32 +
.../java/storm/trident/testing/TrueFilter.java | 30 +
.../java/storm/trident/testing/TuplifyArgs.java | 37 +
.../java/storm/trident/topology/BatchInfo.java | 33 +
.../trident/topology/ITridentBatchBolt.java | 32 +
.../topology/MasterBatchCoordinator.java | 289 +
.../trident/topology/TransactionAttempt.java | 66 +
.../trident/topology/TridentBoltExecutor.java | 426 +
.../topology/TridentTopologyBuilder.java | 782 +
.../state/RotatingTransactionalState.java | 147 +
.../topology/state/TestTransactionalState.java | 47 +
.../topology/state/TransactionalState.java | 171 +
.../java/storm/trident/tuple/ComboList.java | 92 +
.../main/java/storm/trident/tuple/ConsList.java | 44 +
.../java/storm/trident/tuple/TridentTuple.java | 34 +
.../storm/trident/tuple/TridentTupleView.java | 359 +
.../java/storm/trident/tuple/ValuePointer.java | 60 +
.../storm/trident/util/ErrorEdgeFactory.java | 28 +
.../java/storm/trident/util/IndexedEdge.java | 50 +
.../main/java/storm/trident/util/LRUMap.java | 35 +
.../java/storm/trident/util/TridentUtils.java | 142 +
jstorm-core/src/main/py/__init__.py | 0
.../src/main/py/storm/DistributedRPC-remote | 94 +
jstorm-core/src/main/py/storm/DistributedRPC.py | 265 +
.../py/storm/DistributedRPCInvocations-remote | 108 +
.../main/py/storm/DistributedRPCInvocations.py | 565 +
jstorm-core/src/main/py/storm/Nimbus-remote | 304 +
jstorm-core/src/main/py/storm/Nimbus.py | 5971 ++++
jstorm-core/src/main/py/storm/__init__.py | 1 +
jstorm-core/src/main/py/storm/constants.py | 11 +
jstorm-core/src/main/py/storm/ttypes.py | 4456 +++
jstorm-core/src/main/resources/defaults.yaml | 342 +
jstorm-core/src/main/resources/logback-test.xml | 17 +
jstorm-core/src/main/resources/version | 1 +
.../com/alibaba/jstorm/cache/RocksDBTest.java | 466 +
.../jstorm/message/context/ContextTest.java | 60 +
.../jstorm/message/netty/NettyUnitTest.java | 794 +
.../alibaba/jstorm/message/zmq/ZmqUnitTest.java | 317 +
.../com/alibaba/jstorm/metric/MetricTest.java | 380 +
.../superivosr/deamon/HttpserverTest.java | 43 +
.../alibaba/jstorm/topology/SingleJoinBolt.java | 132 +
.../alibaba/jstorm/topology/SingleJoinTest.java | 80 +
.../jstorm/topology/TransactionalWordsTest.java | 280 +
.../jstorm/util/queue/DisruptorTest.java | 566 +
.../alibaba/jstorm/utils/JStormUtilsTest.java | 171 +
.../com/alibaba/jstorm/utils/TestThrift.java | 31 +
jstorm-server/bin/.project | 29 -
.../bin/.settings/org.eclipse.jdt.core.prefs | 5 -
.../bin/.settings/org.eclipse.m2e.core.prefs | 4 -
jstorm-server/bin/jstorm.py | 396 -
jstorm-server/bin/start.sh | 75 -
jstorm-server/bin/stop.sh | 15 -
jstorm-server/conf/aloha_log4j.properties | 19 -
jstorm-server/conf/aloha_logback.xml | 14 -
jstorm-server/conf/cgconfig.conf | 18 -
jstorm-server/conf/jstorm.log4j.properties | 50 -
jstorm-server/conf/jstorm.logback.xml | 78 -
jstorm-server/conf/storm.yaml | 83 -
jstorm-server/pom.xml | 87 -
.../main/java/backtype/storm/LocalCluster.java | 238 -
.../java/backtype/storm/LocalClusterMap.java | 111 -
.../src/main/java/backtype/storm/LocalDRPC.java | 87 -
.../main/java/backtype/storm/LocalUtils.java | 114 -
.../callback/impl/ActiveTransitionCallback.java | 19 -
.../impl/DelayStatusTransitionCallback.java | 96 -
.../impl/DoRebalanceTransitionCallback.java | 54 -
.../impl/InactiveTransitionCallback.java | 21 -
.../callback/impl/KillTransitionCallback.java | 21 -
.../impl/ReassignTransitionCallback.java | 49 -
.../impl/RebalanceTransitionCallback.java | 27 -
.../callback/impl/RemoveTransitionCallback.java | 51 -
.../com/alibaba/jstorm/cluster/Cluster.java | 339 -
.../java/com/alibaba/jstorm/cluster/Common.java | 691 -
.../alibaba/jstorm/cluster/DaemonCommon.java | 5 -
.../com/alibaba/jstorm/cluster/StormBase.java | 114 -
.../jstorm/cluster/StormClusterState.java | 160 -
.../com/alibaba/jstorm/cluster/StormConfig.java | 476 -
.../alibaba/jstorm/cluster/StormMonitor.java | 33 -
.../com/alibaba/jstorm/cluster/StormStatus.java | 106 -
.../jstorm/cluster/StormZkClusterState.java | 823 -
.../alibaba/jstorm/container/CgroupCenter.java | 204 -
.../jstorm/container/CgroupOperation.java | 29 -
.../alibaba/jstorm/container/CgroupUtils.java | 158 -
.../com/alibaba/jstorm/container/Constants.java | 13 -
.../com/alibaba/jstorm/container/Hierarchy.java | 92 -
.../com/alibaba/jstorm/container/SubSystem.java | 53 -
.../alibaba/jstorm/container/SubSystemType.java | 31 -
.../jstorm/container/SystemOperation.java | 58 -
.../jstorm/container/cgroup/CgroupCommon.java | 229 -
.../container/cgroup/CgroupCommonOperation.java | 31 -
.../container/cgroup/CgroupCoreFactory.java | 60 -
.../alibaba/jstorm/container/cgroup/Device.java | 53 -
.../jstorm/container/cgroup/core/BlkioCore.java | 274 -
.../container/cgroup/core/CgroupCore.java | 10 -
.../jstorm/container/cgroup/core/CpuCore.java | 132 -
.../container/cgroup/core/CpuacctCore.java | 59 -
.../container/cgroup/core/CpusetCore.java | 232 -
.../container/cgroup/core/DevicesCore.java | 170 -
.../container/cgroup/core/FreezerCore.java | 50 -
.../container/cgroup/core/MemoryCore.java | 204 -
.../container/cgroup/core/NetClsCore.java | 57 -
.../container/cgroup/core/NetPrioCore.java | 54 -
.../jstorm/daemon/nimbus/DefaultInimbus.java | 55 -
.../jstorm/daemon/nimbus/NimbusData.java | 234 -
.../jstorm/daemon/nimbus/NimbusServer.java | 405 -
.../jstorm/daemon/nimbus/NimbusUtils.java | 798 -
.../jstorm/daemon/nimbus/ServiceHandler.java | 1353 -
.../jstorm/daemon/nimbus/StatusTransition.java | 258 -
.../jstorm/daemon/nimbus/StatusType.java | 48 -
.../jstorm/daemon/nimbus/TopologyAssign.java | 834 -
.../daemon/nimbus/TopologyAssignEvent.java | 91 -
.../daemon/nimbus/TransitionZkCallback.java | 24 -
.../jstorm/daemon/supervisor/CgroupManager.java | 102 -
.../jstorm/daemon/supervisor/Heartbeat.java | 127 -
.../jstorm/daemon/supervisor/Httpserver.java | 460 -
.../jstorm/daemon/supervisor/SandBoxMaker.java | 195 -
.../jstorm/daemon/supervisor/ShutdownWork.java | 140 -
.../daemon/supervisor/StateHeartbeat.java | 36 -
.../jstorm/daemon/supervisor/Supervisor.java | 249 -
.../daemon/supervisor/SupervisorDaemon.java | 13 -
.../daemon/supervisor/SupervisorInfo.java | 153 -
.../daemon/supervisor/SupervisorManger.java | 177 -
.../daemon/supervisor/SyncProcessEvent.java | 882 -
.../daemon/supervisor/SyncSupervisorEvent.java | 448 -
.../jstorm/daemon/worker/BatchTupleRunable.java | 127 -
.../jstorm/daemon/worker/ContextMaker.java | 107 -
.../jstorm/daemon/worker/DrainerRunable.java | 55 -
.../jstorm/daemon/worker/ProcessSimulator.java | 81 -
.../jstorm/daemon/worker/RefreshActive.java | 138 -
.../daemon/worker/RefreshConnections.java | 214 -
.../com/alibaba/jstorm/daemon/worker/State.java | 11 -
.../daemon/worker/VirtualPortDispatch.java | 84 -
.../alibaba/jstorm/daemon/worker/Worker.java | 478 -
.../jstorm/daemon/worker/WorkerData.java | 459 -
.../jstorm/daemon/worker/WorkerHaltRunable.java | 13 -
.../jstorm/daemon/worker/WorkerHeartbeat.java | 72 -
.../jstorm/daemon/worker/WorkerMetricInfo.java | 156 -
.../jstorm/daemon/worker/WorkerShutdown.java | 166 -
.../daemon/worker/hearbeat/SyncContainerHb.java | 361 -
.../worker/hearbeat/WorkerHeartbeatRunable.java | 101 -
.../daemon/worker/metrics/AlimonitorClient.java | 267 -
.../daemon/worker/metrics/MetricKVMsg.java | 254 -
.../daemon/worker/metrics/MetricReporter.java | 135 -
.../daemon/worker/metrics/MetricSendClient.java | 18 -
.../worker/metrics/StormMetricReporter.java | 433 -
.../worker/metrics/TopoCommStatsInfo.java | 229 -
.../worker/metrics/UploadMetricFromZK.java | 242 -
.../worker/metrics/UploadSupervMetric.java | 235 -
.../daemon/worker/timer/RotatingMapTrigger.java | 64 -
.../daemon/worker/timer/TickTupleTrigger.java | 39 -
.../daemon/worker/timer/TimerTrigger.java | 118 -
.../com/alibaba/jstorm/drpc/ClearThread.java | 66 -
.../main/java/com/alibaba/jstorm/drpc/Drpc.java | 294 -
.../com/alibaba/jstorm/event/EventManager.java | 11 -
.../alibaba/jstorm/event/EventManagerImp.java | 79 -
.../jstorm/event/EventManagerImpExecute.java | 57 -
.../jstorm/event/EventManagerPusher.java | 47 -
.../jstorm/event/EventManagerZkPusher.java | 25 -
.../jstorm/message/netty/ControlMessage.java | 62 -
.../jstorm/message/netty/MessageBatch.java | 178 -
.../jstorm/message/netty/MessageDecoder.java | 147 -
.../jstorm/message/netty/MessageEncoder.java | 23 -
.../jstorm/message/netty/NettyClient.java | 450 -
.../jstorm/message/netty/NettyClientAsync.java | 339 -
.../jstorm/message/netty/NettyClientSync.java | 279 -
.../jstorm/message/netty/NettyContext.java | 115 -
.../message/netty/NettyRenameThreadFactory.java | 35 -
.../jstorm/message/netty/NettyServer.java | 199 -
.../jstorm/message/netty/ReconnectRunnable.java | 55 -
.../message/netty/StormClientHandler.java | 87 -
.../netty/StormClientPipelineFactory.java | 27 -
.../message/netty/StormServerHandler.java | 99 -
.../netty/StormServerPipelineFactory.java | 27 -
.../jstorm/message/zeroMq/MQContext.java | 132 -
.../jstorm/message/zeroMq/PacketPair.java | 73 -
.../message/zeroMq/ZMQRecvConnection.java | 105 -
.../message/zeroMq/ZMQSendConnection.java | 92 -
.../alibaba/jstorm/message/zeroMq/ZeroMq.java | 112 -
.../jstorm/queue/disruptor/JstormEvent.java | 23 -
.../queue/disruptor/JstormEventHandler.java | 29 -
.../jstorm/queue/disruptor/JstormProducer.java | 45 -
.../alibaba/jstorm/schedule/CleanRunnable.java | 65 -
.../jstorm/schedule/DelayEventRunnable.java | 25 -
.../jstorm/schedule/FollowerRunnable.java | 269 -
.../jstorm/schedule/IToplogyScheduler.java | 14 -
.../jstorm/schedule/MonitorRunnable.java | 101 -
.../jstorm/schedule/TopologyAssignContext.java | 151 -
.../DefaultTopologyAssignContext.java | 203 -
.../DefaultTopologyScheduler.java | 162 -
.../default_assign/ResourceWorkerSlot.java | 139 -
.../Selector/AbstractSelector.java | 54 -
.../Selector/ComponentNumSelector.java | 37 -
.../Selector/InputComponentNumSelector.java | 36 -
.../default_assign/Selector/Selector.java | 10 -
.../Selector/TotalTaskNumSelector.java | 35 -
.../Selector/WorkerComparator.java | 15 -
.../schedule/default_assign/TaskGanker.java | 209 -
.../default_assign/TaskGankerContext.java | 93 -
.../schedule/default_assign/WorkerMaker.java | 300 -
.../alibaba/jstorm/stats/CommonStatsData.java | 277 -
.../jstorm/stats/CommonStatsRolling.java | 332 -
.../com/alibaba/jstorm/stats/StatFunction.java | 129 -
.../jstorm/stats/incval/IncValExtractor.java | 22 -
.../jstorm/stats/incval/IncValMerger.java | 38 -
.../jstorm/stats/incval/IncValUpdater.java | 36 -
.../jstorm/stats/keyAvg/KeyAvgExtractor.java | 27 -
.../jstorm/stats/keyAvg/KeyAvgMerge.java | 43 -
.../jstorm/stats/keyAvg/KeyAvgUpdater.java | 34 -
.../jstorm/stats/rolling/RollingWindow.java | 165 -
.../jstorm/stats/rolling/RollingWindowSet.java | 124 -
.../jstorm/stats/rolling/UpdateParams.java | 23 -
.../com/alibaba/jstorm/task/Assignment.java | 172 -
.../com/alibaba/jstorm/task/AssignmentBak.java | 38 -
.../alibaba/jstorm/task/LocalAssignment.java | 130 -
.../alibaba/jstorm/task/ShutdownableDameon.java | 10 -
.../main/java/com/alibaba/jstorm/task/Task.java | 292 -
.../java/com/alibaba/jstorm/task/TaskInfo.java | 58 -
.../com/alibaba/jstorm/task/TaskMetricInfo.java | 152 -
.../alibaba/jstorm/task/TaskShutdownDameon.java | 151 -
.../com/alibaba/jstorm/task/TaskStatus.java | 41 -
.../com/alibaba/jstorm/task/TaskTransfer.java | 145 -
.../com/alibaba/jstorm/task/TkHbCacheTime.java | 48 -
.../com/alibaba/jstorm/task/UptimeComputer.java | 21 -
.../alibaba/jstorm/task/acker/AckObject.java | 19 -
.../com/alibaba/jstorm/task/acker/Acker.java | 145 -
.../jstorm/task/comm/TaskSendTargets.java | 133 -
.../com/alibaba/jstorm/task/comm/TupleInfo.java | 56 -
.../jstorm/task/comm/UnanchoredSend.java | 37 -
.../jstorm/task/error/ITaskReportErr.java | 11 -
.../alibaba/jstorm/task/error/TaskError.java | 38 -
.../jstorm/task/error/TaskErrorRunable.java | 31 -
.../jstorm/task/error/TaskReportError.java | 41 -
.../task/error/TaskReportErrorAndDie.java | 26 -
.../jstorm/task/execute/BaseExecutors.java | 295 -
.../jstorm/task/execute/BoltCollector.java | 236 -
.../jstorm/task/execute/BoltExecutors.java | 210 -
.../jstorm/task/execute/spout/AckSpoutMsg.java | 60 -
.../jstorm/task/execute/spout/FailSpoutMsg.java | 55 -
.../jstorm/task/execute/spout/IAckMsg.java | 5 -
.../spout/MultipleThreadSpoutExecutors.java | 116 -
.../spout/SingleThreadSpoutExecutors.java | 91 -
.../task/execute/spout/SpoutCollector.java | 194 -
.../task/execute/spout/SpoutExecutors.java | 260 -
.../execute/spout/SpoutTimeoutCallBack.java | 54 -
.../jstorm/task/execute/spout/TimerRatio.java | 58 -
.../alibaba/jstorm/task/group/GrouperType.java | 11 -
.../jstorm/task/group/MkCustomGrouper.java | 34 -
.../jstorm/task/group/MkFieldsGrouper.java | 43 -
.../alibaba/jstorm/task/group/MkGrouper.java | 183 -
.../alibaba/jstorm/task/group/MkLocalFirst.java | 132 -
.../jstorm/task/group/MkLocalShuffer.java | 55 -
.../alibaba/jstorm/task/group/MkShuffer.java | 38 -
.../com/alibaba/jstorm/task/group/Shuffer.java | 31 -
.../jstorm/task/heartbeat/TaskHeartbeat.java | 85 -
.../task/heartbeat/TaskHeartbeatRunable.java | 147 -
.../jstorm/task/heartbeat/TaskStats.java | 21 -
.../alibaba/jstorm/utils/DisruptorRunable.java | 89 -
.../utils/FailedAssignTopologyException.java | 22 -
.../alibaba/jstorm/utils/JStormServerUtils.java | 132 -
.../java/com/alibaba/jstorm/utils/Thrift.java | 484 -
jstorm-server/src/main/resources/defaults.yaml | 281 -
.../jstorm/message/context/ContextTest.java | 43 -
.../jstorm/message/netty/NettyUnitTest.java | 781 -
.../alibaba/jstorm/message/zmq/ZmqUnitTest.java | 299 -
.../superivosr/deamon/HttpserverTest.java | 25 -
.../alibaba/jstorm/topology/SingleJoinBolt.java | 128 -
.../alibaba/jstorm/topology/SingleJoinTest.java | 79 -
.../jstorm/topology/TransactionalWordsTest.java | 274 -
.../alibaba/jstorm/utils/JStormUtilsTest.java | 158 -
.../src/test/resources/log4j.properties | 47 -
jstorm-ui/.classpath | 32 +
jstorm-ui/.gitignore | 1 +
jstorm-ui/.project | 42 +
jstorm-ui/.settings/.jsdtscope | 13 +
.../.settings/org.eclipse.core.resources.prefs | 4 +
jstorm-ui/.settings/org.eclipse.jdt.core.prefs | 8 +
jstorm-ui/.settings/org.eclipse.m2e.core.prefs | 4 +
.../.settings/org.eclipse.wst.common.component | 13 +
...ipse.wst.common.project.facet.core.prefs.xml | 12 +
...rg.eclipse.wst.common.project.facet.core.xml | 9 +
.../org.eclipse.wst.jsdt.ui.superType.container | 1 +
.../org.eclipse.wst.jsdt.ui.superType.name | 1 +
.../.settings/org.eclipse.wst.validation.prefs | 2 +
jstorm-ui/nb-configuration.xml | 0
jstorm-ui/pom.xml | 246 +-
.../alibaba/jstorm/ui/DescendComparator.java | 67 +-
.../alibaba/jstorm/ui/NimbusClientManager.java | 113 +
.../main/java/com/alibaba/jstorm/ui/UIDef.java | 114 +
.../java/com/alibaba/jstorm/ui/UIMetrics.java | 62 +
.../java/com/alibaba/jstorm/ui/UIUtils.java | 1127 +-
.../alibaba/jstorm/ui/model/ClusterInfo.java | 45 -
.../alibaba/jstorm/ui/model/ClusterSumm.java | 104 -
.../com/alibaba/jstorm/ui/model/ColumnData.java | 59 +
.../alibaba/jstorm/ui/model/ComponentInput.java | 87 -
.../jstorm/ui/model/ComponentOutput.java | 51 -
.../alibaba/jstorm/ui/model/ComponentStats.java | 85 -
.../jstorm/ui/model/ComponentSummary.java | 45 -
.../alibaba/jstorm/ui/model/ComponentTask.java | 96 -
.../com/alibaba/jstorm/ui/model/Components.java | 52 -
.../alibaba/jstorm/ui/model/ErrorSummary.java | 37 -
.../com/alibaba/jstorm/ui/model/LinkData.java | 65 +
.../alibaba/jstorm/ui/model/LogPageIndex.java | 28 -
.../alibaba/jstorm/ui/model/NimbusSlave.java | 67 -
.../alibaba/jstorm/ui/model/PageGenerator.java | 32 +
.../com/alibaba/jstorm/ui/model/PageIndex.java | 151 +
.../alibaba/jstorm/ui/model/SpoutOutput.java | 90 -
.../alibaba/jstorm/ui/model/SupervisorSumm.java | 78 -
.../com/alibaba/jstorm/ui/model/TableData.java | 41 +
.../alibaba/jstorm/ui/model/TaskMetrics.java | 151 -
.../com/alibaba/jstorm/ui/model/TaskSumm.java | 84 -
.../alibaba/jstorm/ui/model/TopologySumm.java | 78 -
.../jstorm/ui/model/WinComponentStats.java | 24 -
.../alibaba/jstorm/ui/model/WorkerMetrics.java | 183 -
.../com/alibaba/jstorm/ui/model/WorkerSumm.java | 114 -
.../alibaba/jstorm/ui/model/data/BoltPage.java | 391 -
.../jstorm/ui/model/data/ClusterPage.java | 101 -
.../alibaba/jstorm/ui/model/data/ConfPage.java | 180 -
.../jstorm/ui/model/data/JStackPage.java | 154 -
.../jstorm/ui/model/data/ListLogPage.java | 237 -
.../alibaba/jstorm/ui/model/data/LogPage.java | 409 -
.../alibaba/jstorm/ui/model/data/MainPage.java | 208 -
.../com/alibaba/jstorm/ui/model/data/Param.java | 100 -
.../alibaba/jstorm/ui/model/data/SpoutPage.java | 398 -
.../jstorm/ui/model/data/SupervisorPage.java | 176 -
.../alibaba/jstorm/ui/model/data/Taskpage.java | 159 -
.../jstorm/ui/model/data/TopologyPage.java | 295 -
.../jstorm/ui/model/pages/ClusterPage.java | 324 +
.../jstorm/ui/model/pages/ComponentPage.java | 473 +
.../alibaba/jstorm/ui/model/pages/ConfPage.java | 167 +
.../alibaba/jstorm/ui/model/pages/HomePage.java | 182 +
.../jstorm/ui/model/pages/JStackPage.java | 135 +
.../jstorm/ui/model/pages/ListLogsPage.java | 232 +
.../alibaba/jstorm/ui/model/pages/LogPage.java | 300 +
.../jstorm/ui/model/pages/NettyPage.java | 131 +
.../jstorm/ui/model/pages/SupervisorPage.java | 281 +
.../jstorm/ui/model/pages/TablePage.java | 154 +
.../jstorm/ui/model/pages/TopologyPage.java | 211 +
.../jstorm/ui/model/pages/WindowTablePage.java | 105 +
jstorm-ui/src/main/resources/defaults.yaml | 235 -
jstorm-ui/src/main/resources/log4j.properties | 0
jstorm-ui/src/main/resources/logback-test.xml | 49 +
jstorm-ui/src/main/webapp/META-INF/context.xml | 0
.../src/main/webapp/WEB-INF/faces-config.xml | 0
jstorm-ui/src/main/webapp/WEB-INF/web.xml | 0
jstorm-ui/src/main/webapp/assets/css/aloha.css | 25 +
.../main/webapp/assets/css/bootstrap-theme.css | 476 +
.../webapp/assets/css/bootstrap-theme.css.map | 1 +
.../webapp/assets/css/bootstrap-theme.min.css | 5 +
.../src/main/webapp/assets/css/bootstrap.css | 6584 ++++
.../main/webapp/assets/css/bootstrap.css.map | 1 +
.../main/webapp/assets/css/bootstrap.min.css | 5 +
.../fonts/glyphicons-halflings-regular.eot | Bin 0 -> 20127 bytes
.../fonts/glyphicons-halflings-regular.svg | 288 +
.../fonts/glyphicons-halflings-regular.ttf | Bin 0 -> 45404 bytes
.../fonts/glyphicons-halflings-regular.woff | Bin 0 -> 23424 bytes
.../fonts/glyphicons-halflings-regular.woff2 | Bin 0 -> 18028 bytes
.../src/main/webapp/assets/js/bootstrap.js | 2317 ++
.../src/main/webapp/assets/js/bootstrap.min.js | 7 +
.../src/main/webapp/assets/js/jquery.min.js | 5 +
jstorm-ui/src/main/webapp/assets/js/npm.js | 13 +
jstorm-ui/src/main/webapp/bolt.xhtml | 402 -
jstorm-ui/src/main/webapp/cluster.xhtml | 274 -
jstorm-ui/src/main/webapp/clusters.xhtml | 137 +-
jstorm-ui/src/main/webapp/conf.xhtml | 26 -
jstorm-ui/src/main/webapp/jstack.xhtml | 29 -
jstorm-ui/src/main/webapp/listlog.xhtml | 104 -
jstorm-ui/src/main/webapp/log.xhtml | 127 +-
jstorm-ui/src/main/webapp/spout.xhtml | 400 -
jstorm-ui/src/main/webapp/supervisor.xhtml | 258 -
jstorm-ui/src/main/webapp/table.xhtml | 89 +
jstorm-ui/src/main/webapp/task.xhtml | 92 -
jstorm-ui/src/main/webapp/topology.xhtml | 360 -
jstorm-ui/src/main/webapp/windowtable.xhtml | 130 +
other/genthrift.sh | 11 +-
other/storm.thrift | 236 +-
other/storm.thrift.bak | 343 +
pom.xml | 173 +-
release.xml | 22 +-
1819 files changed, 182933 insertions(+), 134886 deletions(-)
----------------------------------------------------------------------
[27/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleDeserializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleDeserializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleDeserializer.java
deleted file mode 100644
index 12d9193..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleDeserializer.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.task.GeneralTopologyContext;
-import backtype.storm.tuple.MessageId;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.TupleImplExt;
-
-import com.esotericsoftware.kryo.io.Input;
-
-import java.io.IOException;
-import java.net.URLClassLoader;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-public class KryoTupleDeserializer implements ITupleDeserializer {
- private static final Logger LOG = Logger.getLogger(KryoTupleDeserializer.class);
-
- public static final boolean USE_RAW_PACKET = true;
-
- GeneralTopologyContext _context;
- KryoValuesDeserializer _kryo;
- SerializationFactory.IdDictionary _ids;
- Input _kryoInput;
-
- public KryoTupleDeserializer(final Map conf,
- final GeneralTopologyContext context) {
- _kryo = new KryoValuesDeserializer(conf);
- _context = context;
- _ids = new SerializationFactory.IdDictionary(context.getRawTopology());
- _kryoInput = new Input(1);
- }
-
- public Tuple deserialize(byte[] ser) {
-
- int targetTaskId = 0;
- int taskId = 0;
- int streamId = 0;
- String componentName = null;
- String streamName = null;
- MessageId id = null;
-
- try {
-
- _kryoInput.setBuffer(ser);
-
- targetTaskId = _kryoInput.readInt();
- taskId = _kryoInput.readInt(true);
- streamId = _kryoInput.readInt(true);
- componentName = _context.getComponentId(taskId);
- streamName = _ids.getStreamName(componentName, streamId);
- id = MessageId.deserialize(_kryoInput);
- List<Object> values = _kryo.deserializeFrom(_kryoInput);
- TupleImplExt tuple = new TupleImplExt(_context, values, taskId,
- streamName, id);
- tuple.setTargetTaskId(targetTaskId);
- return tuple;
- } catch (Throwable e) {
- StringBuilder sb = new StringBuilder();
-
- sb.append("Deserialize error:");
- sb.append("targetTaskId:").append(targetTaskId);
- sb.append(",taskId:").append(taskId);
- sb.append(",streamId:").append(streamId);
- sb.append(",componentName:").append(componentName);
- sb.append(",streamName:").append(streamName);
- sb.append(",MessageId").append(id);
-
- LOG.info(sb.toString(), e );
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Read only the target task id from a serialized tuple.
- *
- * @param ser the serialized tuple bytes
- * @return the target task id
- */
- public static int deserializeTaskId(byte[] ser) {
- Input _kryoInput = new Input(1);
-
- _kryoInput.setBuffer(ser);
-
- int targetTaskId = _kryoInput.readInt();
-
- return targetTaskId;
- }
-}
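Editor's note, not part of the patch: the wire layout read by deserialize() above (and written by the KryoTupleSerializer diff that follows) is a plain int target task id, a varint source task id, a varint stream id, the MessageId, then the Kryo-encoded value list. A minimal sketch of decoding just that header, assuming only the Kryo Input API; the class and field names are illustrative:

import com.esotericsoftware.kryo.io.Input;

public class TupleHeader {
    public final int targetTaskId; // plain 4-byte int
    public final int sourceTaskId; // varint, optimizePositive = true
    public final int streamId;     // varint, optimizePositive = true

    public TupleHeader(byte[] ser) {
        Input in = new Input(ser);
        targetTaskId = in.readInt();
        sourceTaskId = in.readInt(true);
        streamId = in.readInt(true);
        // The MessageId and the value list follow, but decoding them needs the
        // topology context and Kryo registrations, so they are skipped here.
    }
}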
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleSerializer.java
deleted file mode 100644
index e04b145..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/KryoTupleSerializer.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.task.GeneralTopologyContext;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.TupleExt;
-
-import com.esotericsoftware.kryo.io.Output;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Map;
-
-public class KryoTupleSerializer implements ITupleSerializer {
- KryoValuesSerializer _kryo;
- SerializationFactory.IdDictionary _ids;
- Output _kryoOut;
-
- public KryoTupleSerializer(final Map conf,
- final GeneralTopologyContext context) {
- _kryo = new KryoValuesSerializer(conf);
- _kryoOut = new Output(2000, 2000000000);
- _ids = new SerializationFactory.IdDictionary(context.getRawTopology());
- }
-
- /**
- * @@@ in the future, it will skip serializing 'targetTask' by checking
- * some flag
- * @see backtype.storm.serialization.ITupleSerializer#serialize(int,
- * backtype.storm.tuple.Tuple)
- */
- public byte[] serialize(Tuple tuple) {
- try {
-
- _kryoOut.clear();
- if (tuple instanceof TupleExt) {
- _kryoOut.writeInt(((TupleExt) tuple).getTargetTaskId());
- }
-
- _kryoOut.writeInt(tuple.getSourceTask(), true);
- _kryoOut.writeInt(
- _ids.getStreamId(tuple.getSourceComponent(),
- tuple.getSourceStreamId()), true);
- tuple.getMessageId().serialize(_kryoOut);
- _kryo.serializeInto(tuple.getValues(), _kryoOut);
- return _kryoOut.toBytes();
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static byte[] serialize(int targetTask) {
- ByteBuffer buff = ByteBuffer.allocate((Integer.SIZE / 8));
- buff.putInt(targetTask);
- byte[] rtn = buff.array();
- return rtn;
- }
-
- // public long crc32(Tuple tuple) {
- // try {
- // CRC32OutputStream hasher = new CRC32OutputStream();
- // _kryo.serializeInto(tuple.getValues(), hasher);
- // return hasher.getValue();
- // } catch (IOException e) {
- // throw new RuntimeException(e);
- // }
- // }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesDeserializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesDeserializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesDeserializer.java
deleted file mode 100644
index c1f3a80..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesDeserializer.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package backtype.storm.serialization;
-
-import java.io.IOException;
-import java.net.URLClassLoader;
-import java.util.List;
-import java.util.Map;
-
-import backtype.storm.utils.ListDelegate;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-
-public class KryoValuesDeserializer {
-
- Kryo _kryo;
- Input _kryoInput;
-
- public KryoValuesDeserializer(Map conf) {
- this._kryo = SerializationFactory.getKryo(conf);
- this._kryoInput = new Input(1);
- }
-
- public List<Object> deserializeFrom(Input input) {
- ListDelegate delegate = (ListDelegate) _kryo.readObject(input,
- ListDelegate.class);
- return delegate.getDelegate();
- }
-
- public List<Object> deserialize(byte[] ser) throws IOException {
- _kryoInput.setBuffer(ser);
- return deserializeFrom(_kryoInput);
- }
-
- public Object deserializeObject(byte[] ser) throws IOException {
- _kryoInput.setBuffer(ser);
- return _kryo.readClassAndObject(_kryoInput);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesSerializer.java
deleted file mode 100644
index 6072282..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/KryoValuesSerializer.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.utils.ListDelegate;
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Output;
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-public class KryoValuesSerializer {
- Kryo _kryo;
- ListDelegate _delegate;
- Output _kryoOut;
-
- public KryoValuesSerializer(Map conf) {
- _kryo = SerializationFactory.getKryo(conf);
- _delegate = new ListDelegate();
- _kryoOut = new Output(2000, 2000000000);
- }
-
- public void serializeInto(List<Object> values, Output out)
- throws IOException {
- // This ensures that the list of values is always written the same way,
- // regardless of whether it's a Java collection or one of Clojure's
- // persistent collections (which have different serializers).
- // Doing this lets us deserialize as ArrayList and avoid writing the
- // class here.
- _delegate.setDelegate(values);
- _kryo.writeObject(out, _delegate);
- }
-
- public byte[] serialize(List<Object> values) throws IOException {
- _kryoOut.clear();
- serializeInto(values, _kryoOut);
- return _kryoOut.toBytes();
- }
-
- public byte[] serializeObject(Object obj) {
- _kryoOut.clear();
- _kryo.writeClassAndObject(_kryoOut, obj);
- return _kryoOut.toBytes();
- }
-}
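Editor's note, not part of the patch: a round-trip sketch of how the two value (de)serializers above pair up. It assumes conf carries the usual topology defaults, e.g. via Utils.readStormConfig(), so that SerializationFactory can build a Kryo instance:

import backtype.storm.serialization.KryoValuesDeserializer;
import backtype.storm.serialization.KryoValuesSerializer;
import backtype.storm.utils.Utils;

import java.util.Arrays;
import java.util.List;
import java.util.Map;

public class ValuesRoundTrip {
    public static void main(String[] args) throws Exception {
        Map conf = Utils.readStormConfig(); // defaults merged with any local storm.yaml
        KryoValuesSerializer ser = new KryoValuesSerializer(conf);
        KryoValuesDeserializer des = new KryoValuesDeserializer(conf);

        byte[] bytes = ser.serialize(Arrays.<Object>asList("word", 42));
        List<Object> values = des.deserialize(bytes);
        System.out.println(values); // [word, 42]
    }
}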
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/SerializableSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/SerializableSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/SerializableSerializer.java
deleted file mode 100644
index 4fcaf02..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/SerializableSerializer.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package backtype.storm.serialization;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.Serializer;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-
-import org.apache.commons.io.input.ClassLoaderObjectInputStream;
-
-public class SerializableSerializer extends Serializer<Object> {
-
- @Override
- public void write(Kryo kryo, Output output, Object object) {
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- try {
- ObjectOutputStream oos = new ObjectOutputStream(bos);
- oos.writeObject(object);
- oos.flush();
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- byte[] ser = bos.toByteArray();
- output.writeInt(ser.length);
- output.writeBytes(ser);
- }
-
- @Override
- public Object read(Kryo kryo, Input input, Class c) {
- int len = input.readInt();
- byte[] ser = new byte[len];
- input.readBytes(ser);
- ByteArrayInputStream bis = new ByteArrayInputStream(ser);
- try {
- ClassLoaderObjectInputStream ois = new ClassLoaderObjectInputStream(
- kryo.getClassLoader(), bis);
- return ois.readObject();
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-}
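Editor's note, not part of the patch: a sketch of when the serializer above is useful, namely forcing plain Java serialization for a type that has no Kryo serializer, via the standard Config.registerSerialization helper. LegacyEvent is a made-up placeholder type:

import backtype.storm.Config;
import backtype.storm.serialization.SerializableSerializer;

import java.io.Serializable;

public class LegacyTypeRegistration {
    // Hypothetical legacy type with no hand-written Kryo serializer.
    public static class LegacyEvent implements Serializable {
        public String payload;
    }

    public static Config apply(Config conf) {
        conf.registerSerialization(LegacyEvent.class, SerializableSerializer.class);
        return conf;
    }
}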
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/SerializationFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/SerializationFactory.java b/jstorm-client/src/main/java/backtype/storm/serialization/SerializationFactory.java
deleted file mode 100644
index 88f7803..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/SerializationFactory.java
+++ /dev/null
@@ -1,242 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.Config;
-import backtype.storm.generated.ComponentCommon;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.serialization.types.ArrayListSerializer;
-import backtype.storm.serialization.types.ListDelegateSerializer;
-import backtype.storm.serialization.types.HashMapSerializer;
-import backtype.storm.serialization.types.HashSetSerializer;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.ListDelegate;
-import backtype.storm.utils.Utils;
-import backtype.storm.utils.WorkerClassLoader;
-import carbonite.JavaBridge;
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.Serializer;
-import com.esotericsoftware.kryo.serializers.DefaultSerializers.BigIntegerSerializer;
-import java.math.BigInteger;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SerializationFactory {
- public static final Logger LOG = LoggerFactory
- .getLogger(SerializationFactory.class);
-
- public static Kryo getKryo(Map conf) {
- IKryoFactory kryoFactory = (IKryoFactory) Utils
- .newInstance((String) conf.get(Config.TOPOLOGY_KRYO_FACTORY));
- Kryo k = kryoFactory.getKryo(conf);
- if (WorkerClassLoader.getInstance() != null)
- k.setClassLoader(WorkerClassLoader.getInstance());
- k.register(byte[].class);
-
- /* tuple payload serializer is specified via configuration */
- String payloadSerializerName = (String) conf
- .get(Config.TOPOLOGY_TUPLE_SERIALIZER);
- try {
- Class serializerClass = Class.forName(
- payloadSerializerName, true, k.getClassLoader());
- Serializer serializer = resolveSerializerInstance(k,
- ListDelegate.class, serializerClass, conf);
- k.register(ListDelegate.class, serializer);
- } catch (ClassNotFoundException ex) {
- throw new RuntimeException(ex);
- }
-
- k.register(ArrayList.class, new ArrayListSerializer());
- k.register(HashMap.class, new HashMapSerializer());
- k.register(HashSet.class, new HashSetSerializer());
- k.register(BigInteger.class, new BigIntegerSerializer());
- k.register(TransactionAttempt.class);
- k.register(Values.class);
- k.register(backtype.storm.metric.api.IMetricsConsumer.DataPoint.class);
- k.register(backtype.storm.metric.api.IMetricsConsumer.TaskInfo.class);
- try {
- JavaBridge.registerPrimitives(k);
- JavaBridge.registerCollections(k);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
-
- Map<String, String> registrations = normalizeKryoRegister(conf);
-
- kryoFactory.preRegister(k, conf);
-
- boolean skipMissing = (Boolean) conf
- .get(Config.TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS);
- for (String klassName : registrations.keySet()) {
- String serializerClassName = registrations.get(klassName);
- try {
- Class klass = Class.forName(
- klassName, true, k.getClassLoader());
-
- Class serializerClass = null;
- if (serializerClassName != null)
- serializerClass = Class.forName(
- serializerClassName, true, k.getClassLoader());
- if (serializerClass == null) {
- k.register(klass);
- } else {
- k.register(
- klass,
- resolveSerializerInstance(k, klass,
- serializerClass, conf));
- }
- } catch (ClassNotFoundException e) {
- if (skipMissing) {
- LOG.info("Could not find serialization or class for "
- + serializerClassName
- + ". Skipping registration...");
- } else {
- throw new RuntimeException(e);
- }
- }
- }
-
- kryoFactory.postRegister(k, conf);
-
- if (conf.get(Config.TOPOLOGY_KRYO_DECORATORS) != null) {
- for (String klassName : (List<String>) conf
- .get(Config.TOPOLOGY_KRYO_DECORATORS)) {
- try {
- Class klass = Class.forName(
- klassName, true, k.getClassLoader());
- IKryoDecorator decorator = (IKryoDecorator) klass
- .newInstance();
- decorator.decorate(k);
- } catch (ClassNotFoundException e) {
- if (skipMissing) {
- LOG.info("Could not find kryo decorator named "
- + klassName + ". Skipping registration...");
- } else {
- throw new RuntimeException(e);
- }
- } catch (InstantiationException e) {
- throw new RuntimeException(e);
- } catch (IllegalAccessException e) {
- throw new RuntimeException(e);
- }
- }
- }
-
- kryoFactory.postDecorate(k, conf);
-
- return k;
- }
-
- public static class IdDictionary {
- Map<String, Map<String, Integer>> streamNametoId = new HashMap<String, Map<String, Integer>>();
- Map<String, Map<Integer, String>> streamIdToName = new HashMap<String, Map<Integer, String>>();
-
- public IdDictionary(StormTopology topology) {
- List<String> componentNames = new ArrayList<String>(topology
- .get_spouts().keySet());
- componentNames.addAll(topology.get_bolts().keySet());
- componentNames.addAll(topology.get_state_spouts().keySet());
-
- for (String name : componentNames) {
- ComponentCommon common = Utils.getComponentCommon(topology,
- name);
- List<String> streams = new ArrayList<String>(common
- .get_streams().keySet());
- streamNametoId.put(name, idify(streams));
- streamIdToName.put(name,
- Utils.reverseMap(streamNametoId.get(name)));
- }
- }
-
- public int getStreamId(String component, String stream) {
- return streamNametoId.get(component).get(stream);
- }
-
- public String getStreamName(String component, int stream) {
- return streamIdToName.get(component).get(stream);
- }
-
- private static Map<String, Integer> idify(List<String> names) {
- Collections.sort(names);
- Map<String, Integer> ret = new HashMap<String, Integer>();
- int i = 1;
- for (String name : names) {
- ret.put(name, i);
- i++;
- }
- return ret;
- }
- }
-
- private static Serializer resolveSerializerInstance(Kryo k,
- Class superClass, Class<? extends Serializer> serializerClass,
- Map conf) {
- try {
- try {
- return serializerClass.getConstructor(Kryo.class, Class.class,
- Map.class).newInstance(k, superClass, conf);
- } catch (Exception ex1) {
- try {
- return serializerClass.getConstructor(Kryo.class,
- Class.class).newInstance(k, superClass);
- } catch (Exception ex2) {
- try {
- return serializerClass.getConstructor(Kryo.class,
- Map.class).newInstance(k, conf);
- } catch (Exception ex3) {
- try {
- return serializerClass.getConstructor(Kryo.class)
- .newInstance(k);
- } catch (Exception ex4) {
- try {
- return serializerClass.getConstructor(
- Class.class, Map.class).newInstance(
- superClass, conf);
- } catch (Exception ex5) {
- try {
- return serializerClass.getConstructor(
- Class.class)
- .newInstance(superClass);
- } catch (Exception ex6) {
- return serializerClass.newInstance();
- }
- }
- }
- }
- }
- }
- } catch (Exception ex) {
- throw new IllegalArgumentException("Unable to create serializer \""
- + serializerClass.getName() + "\" for class: "
- + superClass.getName(), ex);
- }
- }
-
- private static Map<String, String> normalizeKryoRegister(Map conf) {
- // TODO: de-duplicate this logic with the code in nimbus
- Object res = conf.get(Config.TOPOLOGY_KRYO_REGISTER);
- if (res == null)
- return new TreeMap<String, String>();
- Map<String, String> ret = new HashMap<String, String>();
- if (res instanceof Map) {
- ret = (Map<String, String>) res;
- } else {
- for (Object o : (List) res) {
- if (o instanceof Map) {
- ret.putAll((Map) o);
- } else {
- ret.put((String) o, null);
- }
- }
- }
-
- // ensure always same order for registrations with TreeMap
- return new TreeMap<String, String>(ret);
- }
-}
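Editor's note, not part of the patch: normalizeKryoRegister() above accepts topology.kryo.register either as a map of class name to serializer class name or as a list mixing bare names and single-entry maps. A sketch of producing both shapes with the standard Config helpers instead of hand-writing the structure:

import backtype.storm.Config;
import com.esotericsoftware.kryo.serializers.JavaSerializer;

public class KryoRegistrationExample {
    public static Config build() {
        Config conf = new Config();
        // Bare class name: registered with Kryo's default serializer for the type.
        conf.registerSerialization(java.util.UUID.class);
        // Explicit serializer class: fall back to Java serialization for this type.
        conf.registerSerialization(java.math.BigDecimal.class, JavaSerializer.class);
        return conf;
    }
}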
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/types/ArrayListSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/types/ArrayListSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/types/ArrayListSerializer.java
deleted file mode 100644
index e403a95..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/types/ArrayListSerializer.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.serialization.types;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.serializers.CollectionSerializer;
-import java.util.ArrayList;
-import java.util.Collection;
-
-public class ArrayListSerializer extends CollectionSerializer {
- @Override
- public Collection create(Kryo kryo, Input input, Class<Collection> type) {
- return new ArrayList();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/types/HashMapSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/types/HashMapSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/types/HashMapSerializer.java
deleted file mode 100644
index c1f7456..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/types/HashMapSerializer.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.serialization.types;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.serializers.MapSerializer;
-import java.util.HashMap;
-import java.util.Map;
-
-public class HashMapSerializer extends MapSerializer {
- @Override
- public Map create(Kryo kryo, Input input, Class<Map> type) {
- return new HashMap();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/types/HashSetSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/types/HashSetSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/types/HashSetSerializer.java
deleted file mode 100644
index b28bbd6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/types/HashSetSerializer.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.serialization.types;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.serializers.CollectionSerializer;
-import java.util.Collection;
-import java.util.HashSet;
-
-public class HashSetSerializer extends CollectionSerializer {
- @Override
- public Collection create(Kryo kryo, Input input, Class<Collection> type) {
- return new HashSet();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/types/ListDelegateSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/types/ListDelegateSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/types/ListDelegateSerializer.java
deleted file mode 100644
index 67242a2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/types/ListDelegateSerializer.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.serialization.types;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.serializers.CollectionSerializer;
-import backtype.storm.utils.ListDelegate;
-import java.util.Collection;
-
-public class ListDelegateSerializer extends CollectionSerializer {
- @Override
- public Collection create(Kryo kryo, Input input, Class<Collection> type) {
- return new ListDelegate();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/IMultiSchemableSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/IMultiSchemableSpout.java b/jstorm-client/src/main/java/backtype/storm/spout/IMultiSchemableSpout.java
deleted file mode 100644
index ba31324..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/IMultiSchemableSpout.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.spout;
-
-public interface IMultiSchemableSpout {
- MultiScheme getScheme();
-
- void setScheme(MultiScheme scheme);
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/ISchemableSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/ISchemableSpout.java b/jstorm-client/src/main/java/backtype/storm/spout/ISchemableSpout.java
deleted file mode 100644
index 5bbc869..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/ISchemableSpout.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.spout;
-
-public interface ISchemableSpout {
- Scheme getScheme();
-
- void setScheme(Scheme scheme);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/ISpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/ISpout.java b/jstorm-client/src/main/java/backtype/storm/spout/ISpout.java
deleted file mode 100644
index 67f94f3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/ISpout.java
+++ /dev/null
@@ -1,116 +0,0 @@
-package backtype.storm.spout;
-
-import backtype.storm.task.TopologyContext;
-import java.util.Map;
-import java.io.Serializable;
-
-/**
- * ISpout is the core interface for implementing spouts. A Spout is responsible
- * for feeding messages into the topology for processing. For every tuple
- * emitted by a spout, Storm will track the (potentially very large) DAG of
- * tuples generated based on a tuple emitted by the spout. When Storm detects
- * that every tuple in that DAG has been successfully processed, it will send an
- * ack message to the Spout.
- *
- * <p>
- * If a tuple fails to be fully processed within the configured timeout for the
- * topology (see {@link backtype.storm.Config}), Storm will send a fail message
- * to the spout for the message.
- * </p>
- *
- * <p>
- * When a Spout emits a tuple, it can tag the tuple with a message id. The
- * message id can be any type. When Storm acks or fails a message, it will pass
- * back to the spout the same message id to identify which tuple it's referring
- * to. If the spout leaves out the message id, or sets it to null, then Storm
- * will not track the message and the spout will not receive any ack or fail
- * callbacks for the message.
- * </p>
- *
- * <p>
- * Storm executes ack, fail, and nextTuple all on the same thread. This means
- * that an implementor of an ISpout does not need to worry about concurrency
- * issues between those methods. However, it also means that an implementor must
- * ensure that nextTuple is non-blocking: otherwise the method could block acks
- * and fails that are pending to be processed.
- * </p>
- */
-public interface ISpout extends Serializable {
- /**
- * Called when a task for this component is initialized within a worker on
- * the cluster. It provides the spout with the environment in which the
- * spout executes.
- *
- * <p>
- * This includes the following:
- * </p>
- *
- * @param conf
- * The Storm configuration for this spout. This is the
- * configuration provided to the topology merged in with cluster
- * configuration on this machine.
- * @param context
- * This object can be used to get information about this task's
- * place within the topology, including the task id and component
- * id of this task, input and output information, etc.
- * @param collector
- * The collector is used to emit tuples from this spout. Tuples
- * can be emitted at any time, including the open and close
- * methods. The collector is thread-safe and should be saved as
- * an instance variable of this spout object.
- */
- void open(Map conf, TopologyContext context, SpoutOutputCollector collector);
-
- /**
- * Called when an ISpout is going to be shut down. There is no guarantee that
- * close will be called, because the supervisor kill -9's worker processes
- * on the cluster.
- *
- * <p>
- * The one context where close is guaranteed to be called is when a topology
- * is killed while running Storm in local mode.
- * </p>
- */
- void close();
-
- /**
- * Called when a spout has been activated out of a deactivated mode.
- * nextTuple will be called on this spout soon. A spout can become activated
- * after having been deactivated when the topology is manipulated using the
- * `storm` client.
- */
- void activate();
-
- /**
- * Called when a spout has been deactivated. nextTuple will not be called
- * while a spout is deactivated. The spout may or may not be reactivated in
- * the future.
- */
- void deactivate();
-
- /**
- * When this method is called, Storm is requesting that the Spout emit
- * tuples to the output collector. This method should be non-blocking, so if
- * the Spout has no tuples to emit, this method should return. nextTuple,
- * ack, and fail are all called in a tight loop in a single thread in the
- * spout task. When there are no tuples to emit, it is courteous to have
- * nextTuple sleep for a short amount of time (like a single millisecond) so
- * as not to waste too much CPU.
- */
- void nextTuple();
-
- /**
- * Storm has determined that the tuple emitted by this spout with the msgId
- * identifier has been fully processed. Typically, an implementation of this
- * method will take that message off the queue and prevent it from being
- * replayed.
- */
- void ack(Object msgId);
-
- /**
- * The tuple emitted by this spout with the msgId identifier has failed to
- * be fully processed. Typically, an implementation of this method will put
- * that message back on the queue to be replayed at a later time.
- */
- void fail(Object msgId);
-}
\ No newline at end of file
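Editor's note, not part of the patch: a minimal spout sketch making the contract above concrete: nextTuple() never blocks, every emit carries a message id, and failed ids are queued for replay. Class and field names are illustrative only:

import backtype.storm.spout.SpoutOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseRichSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import backtype.storm.utils.Utils;

import java.util.Map;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

public class NumberSpout extends BaseRichSpout {
    private SpoutOutputCollector collector;
    private final Queue<Long> pending = new ConcurrentLinkedQueue<Long>();
    private long next = 0;

    public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
        this.collector = collector;
    }

    public void nextTuple() {
        Long retry = pending.poll();
        long id = (retry != null) ? retry : next++;
        collector.emit(new Values(id), id); // the message id enables ack/fail tracking
        Utils.sleep(1); // stay polite; a wait strategy normally handles empty streaks
    }

    public void ack(Object msgId) { /* done: drop any bookkeeping for msgId */ }

    public void fail(Object msgId) { pending.add((Long) msgId); } // replay later

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("n"));
    }
}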
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/ISpoutOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/ISpoutOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/spout/ISpoutOutputCollector.java
deleted file mode 100644
index 6b66b00..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/ISpoutOutputCollector.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.spout;
-
-import java.util.List;
-
-public interface ISpoutOutputCollector {
- /**
- * Returns the task ids that received the tuples.
- */
- List<Integer> emit(String streamId, List<Object> tuple, Object messageId);
-
- void emitDirect(int taskId, String streamId, List<Object> tuple,
- Object messageId);
-
- void reportError(Throwable error);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/ISpoutWaitStrategy.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/ISpoutWaitStrategy.java b/jstorm-client/src/main/java/backtype/storm/spout/ISpoutWaitStrategy.java
deleted file mode 100644
index f5a25b3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/ISpoutWaitStrategy.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package backtype.storm.spout;
-
-import java.util.Map;
-
-/**
- * The strategy a spout needs to use when its waiting. Waiting is triggered in
- * one of two conditions:
- *
- * 1. nextTuple emits no tuples 2. The spout has hit maxSpoutPending and can't
- * emit any more tuples
- *
- * The default strategy sleeps for one millisecond.
- */
-public interface ISpoutWaitStrategy {
- void prepare(Map conf);
-
- void emptyEmit(long streak);
-}
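Editor's note, not part of the patch: a sketch of an alternative to the default 1 ms sleep described above: back off as the streak of empty emits grows, capped at 10 ms. The class name and the scaling constants are illustrative:

import backtype.storm.spout.ISpoutWaitStrategy;

import java.util.Map;

public class BackoffWaitStrategy implements ISpoutWaitStrategy {
    public void prepare(Map conf) {
        // no configuration needed for this sketch
    }

    public void emptyEmit(long streak) {
        // streak = consecutive calls to nextTuple that emitted nothing
        long sleepMs = Math.min(10, 1 + streak / 100);
        try {
            Thread.sleep(sleepMs);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }
}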
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/MultiScheme.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/MultiScheme.java b/jstorm-client/src/main/java/backtype/storm/spout/MultiScheme.java
deleted file mode 100644
index e67d036..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/MultiScheme.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package backtype.storm.spout;
-
-import java.util.List;
-import java.io.Serializable;
-
-import backtype.storm.tuple.Fields;
-
-public interface MultiScheme extends Serializable {
- public Iterable<List<Object>> deserialize(byte[] ser);
-
- public Fields getOutputFields();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/NothingEmptyEmitStrategy.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/NothingEmptyEmitStrategy.java b/jstorm-client/src/main/java/backtype/storm/spout/NothingEmptyEmitStrategy.java
deleted file mode 100644
index c084b10..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/NothingEmptyEmitStrategy.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.spout;
-
-import java.util.Map;
-
-public class NothingEmptyEmitStrategy implements ISpoutWaitStrategy {
- @Override
- public void emptyEmit(long streak) {
- }
-
- @Override
- public void prepare(Map conf) {
- throw new UnsupportedOperationException("Not supported yet.");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/RawMultiScheme.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/RawMultiScheme.java b/jstorm-client/src/main/java/backtype/storm/spout/RawMultiScheme.java
deleted file mode 100644
index 2446b45..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/RawMultiScheme.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package backtype.storm.spout;
-
-import java.util.List;
-
-import backtype.storm.tuple.Fields;
-
-import static backtype.storm.utils.Utils.tuple;
-import static java.util.Arrays.asList;
-
-public class RawMultiScheme implements MultiScheme {
- @Override
- public Iterable<List<Object>> deserialize(byte[] ser) {
- return asList(tuple(ser));
- }
-
- @Override
- public Fields getOutputFields() {
- return new Fields("bytes");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/RawScheme.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/RawScheme.java b/jstorm-client/src/main/java/backtype/storm/spout/RawScheme.java
deleted file mode 100644
index 46e9d1c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/RawScheme.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.spout;
-
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import static backtype.storm.utils.Utils.tuple;
-
-public class RawScheme implements Scheme {
- public List<Object> deserialize(byte[] ser) {
- return tuple(ser);
- }
-
- public Fields getOutputFields() {
- return new Fields("bytes");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/Scheme.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/Scheme.java b/jstorm-client/src/main/java/backtype/storm/spout/Scheme.java
deleted file mode 100644
index 26bf3ae..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/Scheme.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package backtype.storm.spout;
-
-import backtype.storm.tuple.Fields;
-import java.io.Serializable;
-import java.util.List;
-
-public interface Scheme extends Serializable {
- public List<Object> deserialize(byte[] ser);
-
- public Fields getOutputFields();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/SchemeAsMultiScheme.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/SchemeAsMultiScheme.java b/jstorm-client/src/main/java/backtype/storm/spout/SchemeAsMultiScheme.java
deleted file mode 100644
index cc80ef9..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/SchemeAsMultiScheme.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package backtype.storm.spout;
-
-import java.util.Arrays;
-import java.util.List;
-
-import backtype.storm.tuple.Fields;
-
-public class SchemeAsMultiScheme implements MultiScheme {
- public final Scheme scheme;
-
- public SchemeAsMultiScheme(Scheme scheme) {
- this.scheme = scheme;
- }
-
- @Override
- public Iterable<List<Object>> deserialize(final byte[] ser) {
- List<Object> o = scheme.deserialize(ser);
- if (o == null)
- return null;
- else
- return Arrays.asList(o);
- }
-
- @Override
- public Fields getOutputFields() {
- return scheme.getOutputFields();
- }
-}
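For readers skimming this removal: a Scheme turns a raw byte payload into a single values list, and SchemeAsMultiScheme adapts it to the MultiScheme interface. Below is a minimal sketch of a custom scheme; the StringScheme name and the "str" field are made up for illustration and are not part of this change.

    package backtype.storm.spout;

    import java.nio.charset.StandardCharsets;
    import java.util.List;

    import backtype.storm.tuple.Fields;
    import backtype.storm.utils.Utils;

    // Hypothetical scheme: decode the payload as one UTF-8 string field.
    public class StringScheme implements Scheme {
        @Override
        public List<Object> deserialize(byte[] ser) {
            // Utils.tuple(...) builds a one-element values list, as RawScheme does.
            return Utils.tuple(new String(ser, StandardCharsets.UTF_8));
        }

        @Override
        public Fields getOutputFields() {
            return new Fields("str");
        }
    }

A spout that expects a MultiScheme can then be handed new SchemeAsMultiScheme(new StringScheme()).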
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/ShellSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/ShellSpout.java b/jstorm-client/src/main/java/backtype/storm/spout/ShellSpout.java
deleted file mode 100644
index a8e18dc..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/ShellSpout.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.spout;
-
-import backtype.storm.Config;
-import backtype.storm.generated.ShellComponent;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.metric.api.rpc.IShellMetric;
-import backtype.storm.multilang.ShellMsg;
-import backtype.storm.multilang.SpoutMsg;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.utils.ShellProcess;
-import java.util.Map;
-import java.util.List;
-import java.util.TimerTask;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledThreadPoolExecutor;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicLong;
-
-import clojure.lang.RT;
-import com.google.common.util.concurrent.MoreExecutors;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-public class ShellSpout implements ISpout {
- public static Logger LOG = LoggerFactory.getLogger(ShellSpout.class);
-
- private SpoutOutputCollector _collector;
- private String[] _command;
- private ShellProcess _process;
-
- private TopologyContext _context;
-
- private SpoutMsg _spoutMsg;
-
- private int workerTimeoutMills;
- private ScheduledExecutorService heartBeatExecutorService;
- private AtomicLong lastHeartbeatTimestamp = new AtomicLong();
-
- public ShellSpout(ShellComponent component) {
- this(component.get_execution_command(), component.get_script());
- }
-
- public ShellSpout(String... command) {
- _command = command;
- }
-
- public void open(Map stormConf, TopologyContext context,
- SpoutOutputCollector collector) {
- _collector = collector;
- _context = context;
-
- workerTimeoutMills = 1000 * RT.intCast(stormConf.get(Config.SUPERVISOR_WORKER_TIMEOUT_SECS));
-
- _process = new ShellProcess(_command);
-
- Number subpid = _process.launch(stormConf, context);
- LOG.info("Launched subprocess with pid " + subpid);
-
- heartBeatExecutorService = MoreExecutors.getExitingScheduledExecutorService(new ScheduledThreadPoolExecutor(1));
- }
-
- public void close() {
- heartBeatExecutorService.shutdownNow();
- _process.destroy();
- }
-
- public void nextTuple() {
- if (_spoutMsg == null) {
- _spoutMsg = new SpoutMsg();
- }
- _spoutMsg.setCommand("next");
- _spoutMsg.setId("");
- querySubprocess();
- }
-
- public void ack(Object msgId) {
- if (_spoutMsg == null) {
- _spoutMsg = new SpoutMsg();
- }
- _spoutMsg.setCommand("ack");
- _spoutMsg.setId(msgId);
- querySubprocess();
- }
-
- public void fail(Object msgId) {
- if (_spoutMsg == null) {
- _spoutMsg = new SpoutMsg();
- }
- _spoutMsg.setCommand("fail");
- _spoutMsg.setId(msgId);
- querySubprocess();
- }
-
- private void handleMetrics(ShellMsg shellMsg) {
- //get metric name
- String name = shellMsg.getMetricName();
- if (name.isEmpty()) {
- throw new RuntimeException("Received metric name is empty");
- }
-
- //get metric by name
- IMetric iMetric = _context.getRegisteredMetricByName(name);
- if (iMetric == null) {
- throw new RuntimeException("Could not find metric by name["+name+"] ");
- }
- if ( !(iMetric instanceof IShellMetric)) {
- throw new RuntimeException("Metric["+name+"] is not an IShellMetric, cannot be called via RPC");
- }
- IShellMetric iShellMetric = (IShellMetric)iMetric;
-
- //call updateMetricFromRPC with params
- Object paramsObj = shellMsg.getMetricParams();
- try {
- iShellMetric.updateMetricFromRPC(paramsObj);
- } catch (RuntimeException re) {
- throw re;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private void querySubprocess() {
- try {
- _process.writeSpoutMsg(_spoutMsg);
-
- while (true) {
- ShellMsg shellMsg = _process.readShellMsg();
- String command = shellMsg.getCommand();
- if (command == null) {
- throw new IllegalArgumentException("Command not found in spout message: " + shellMsg);
- }
-
- setHeartbeat();
-
- if (command.equals("sync")) {
- return;
- } else if (command.equals("log")) {
- handleLog(shellMsg);
- } else if (command.equals("emit")) {
- String stream = shellMsg.getStream();
- Long task = shellMsg.getTask();
- List<Object> tuple = shellMsg.getTuple();
- Object messageId = shellMsg.getId();
- if (task == 0) {
- List<Integer> outtasks = _collector.emit(stream, tuple, messageId);
- if (shellMsg.areTaskIdsNeeded()) {
- _process.writeTaskIds(outtasks);
- }
- } else {
- _collector.emitDirect((int) task.longValue(), stream, tuple, messageId);
- }
- } else if (command.equals("metrics")) {
- handleMetrics(shellMsg);
- } else {
- throw new RuntimeException("Unknown command received: " + command);
- }
- }
- } catch (Exception e) {
- String processInfo = _process.getProcessInfoString() + _process.getProcessTerminationInfoString();
- throw new RuntimeException(processInfo, e);
- }
- }
-
- private void handleLog(ShellMsg shellMsg) {
- String msg = shellMsg.getMsg();
- msg = "ShellLog " + _process.getProcessInfoString() + " " + msg;
- ShellMsg.ShellLogLevel logLevel = shellMsg.getLogLevel();
-
- switch (logLevel) {
- case TRACE:
- LOG.trace(msg);
- break;
- case DEBUG:
- LOG.debug(msg);
- break;
- case INFO:
- LOG.info(msg);
- break;
- case WARN:
- LOG.warn(msg);
- break;
- case ERROR:
- LOG.error(msg);
- break;
- default:
- LOG.info(msg);
- break;
- }
- }
-
- @Override
- public void activate() {
- LOG.info("Start checking heartbeat...");
- // reset the heartbeat so the timer does not check against a stale timestamp from before activation
- setHeartbeat();
- heartBeatExecutorService.scheduleAtFixedRate(new SpoutHeartbeatTimerTask(this), 1, 1, TimeUnit.SECONDS);
- }
-
- @Override
- public void deactivate() {
- heartBeatExecutorService.shutdownNow();
- }
-
- private void setHeartbeat() {
- lastHeartbeatTimestamp.set(System.currentTimeMillis());
- }
-
- private long getLastHeartbeat() {
- return lastHeartbeatTimestamp.get();
- }
-
- private void die(Throwable exception) {
- heartBeatExecutorService.shutdownNow();
-
- LOG.error("Halting process: ShellSpout died.", exception);
- _collector.reportError(exception);
- _process.destroy();
- System.exit(11);
- }
-
- private class SpoutHeartbeatTimerTask extends TimerTask {
- private ShellSpout spout;
-
- public SpoutHeartbeatTimerTask(ShellSpout spout) {
- this.spout = spout;
- }
-
- @Override
- public void run() {
- long currentTimeMillis = System.currentTimeMillis();
- long lastHeartbeat = getLastHeartbeat();
-
- LOG.debug("current time : {}, last heartbeat : {}, worker timeout (ms) : {}",
- currentTimeMillis, lastHeartbeat, workerTimeoutMills);
-
- if (currentTimeMillis - lastHeartbeat > workerTimeoutMills) {
- spout.die(new RuntimeException("subprocess heartbeat timeout"));
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/SleepSpoutWaitStrategy.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/SleepSpoutWaitStrategy.java b/jstorm-client/src/main/java/backtype/storm/spout/SleepSpoutWaitStrategy.java
deleted file mode 100644
index 0aa7f64..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/SleepSpoutWaitStrategy.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package backtype.storm.spout;
-
-import backtype.storm.Config;
-import java.util.Map;
-
-public class SleepSpoutWaitStrategy implements ISpoutWaitStrategy {
-
- long sleepMillis;
-
- @Override
- public void prepare(Map conf) {
- sleepMillis = ((Number) conf
- .get(Config.TOPOLOGY_SLEEP_SPOUT_WAIT_STRATEGY_TIME_MS))
- .longValue();
- }
-
- @Override
- public void emptyEmit(long streak) {
- try {
- Thread.sleep(sleepMillis);
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
- }
-}
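The sleep interval read in prepare() above comes from the topology config. A small sketch of supplying it, assuming the TOPOLOGY_SPOUT_WAIT_STRATEGY constant from the same Config class; the 500 ms value is illustrative.

    import backtype.storm.Config;

    public class SpoutWaitConfigExample {
        public static void main(String[] args) {
            Config conf = new Config();
            // Sleep 500 ms whenever nextTuple() emits nothing (illustrative value).
            conf.put(Config.TOPOLOGY_SLEEP_SPOUT_WAIT_STRATEGY_TIME_MS, 500);
            // Select the strategy implementation by class name.
            conf.put(Config.TOPOLOGY_SPOUT_WAIT_STRATEGY,
                     "backtype.storm.spout.SleepSpoutWaitStrategy");
            System.out.println(conf);
        }
    }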
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/spout/SpoutOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/spout/SpoutOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/spout/SpoutOutputCollector.java
deleted file mode 100644
index 069fb99..0000000
--- a/jstorm-client/src/main/java/backtype/storm/spout/SpoutOutputCollector.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package backtype.storm.spout;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.utils.Utils;
-import java.util.List;
-
-/**
- * This output collector exposes the API for emitting tuples from an
- * {@link backtype.storm.topology.IRichSpout}. The main difference between this
- * output collector and {@link OutputCollector} for
- * {@link backtype.storm.topology.IRichBolt} is that spouts can tag messages
- * with ids so that they can be acked or failed later on. This is the Spout
- * portion of Storm's API to guarantee that each message is fully processed at
- * least once.
- */
-public class SpoutOutputCollector implements ISpoutOutputCollector {
- ISpoutOutputCollector _delegate;
-
- public SpoutOutputCollector(ISpoutOutputCollector delegate) {
- _delegate = delegate;
- }
-
- /**
- * Emits a new tuple to the specified output stream with the given message
- * ID. When Storm detects that this tuple has been fully processed, or has
- * failed to be fully processed, the spout will receive an ack or fail
- * callback respectively with the messageId as long as the messageId was not
- * null. If the messageId was null, Storm will not track the tuple and no
- * callback will be received. The emitted values must be immutable.
- *
- * @return the list of task ids that this tuple was sent to
- */
- public List<Integer> emit(String streamId, List<Object> tuple,
- Object messageId) {
- return _delegate.emit(streamId, tuple, messageId);
- }
-
- /**
- * Emits a new tuple to the default output stream with the given message ID.
- * When Storm detects that this tuple has been fully processed, or has
- * failed to be fully processed, the spout will receive an ack or fail
- * callback respectively with the messageId as long as the messageId was not
- * null. If the messageId was null, Storm will not track the tuple and no
- * callback will be received. The emitted values must be immutable.
- *
- * @return the list of task ids that this tuple was sent to
- */
- public List<Integer> emit(List<Object> tuple, Object messageId) {
- return emit(Utils.DEFAULT_STREAM_ID, tuple, messageId);
- }
-
- /**
- * Emits a tuple to the default output stream with a null message id. Storm
- * will not track this message so ack and fail will never be called for this
- * tuple. The emitted values must be immutable.
- */
- public List<Integer> emit(List<Object> tuple) {
- return emit(tuple, null);
- }
-
- /**
- * Emits a tuple to the specified output stream with a null message id.
- * Storm will not track this message so ack and fail will never be called
- * for this tuple. The emitted values must be immutable.
- */
- public List<Integer> emit(String streamId, List<Object> tuple) {
- return emit(streamId, tuple, null);
- }
-
- /**
- * Emits a tuple to the specified task on the specified output stream. This
- * output stream must have been declared as a direct stream, and the
- * specified task must use a direct grouping on this stream to receive the
- * message. The emitted values must be immutable.
- */
- public void emitDirect(int taskId, String streamId, List<Object> tuple,
- Object messageId) {
- _delegate.emitDirect(taskId, streamId, tuple, messageId);
- }
-
- /**
- * Emits a tuple to the specified task on the default output stream. This
- * output stream must have been declared as a direct stream, and the
- * specified task must use a direct grouping on this stream to receive the
- * message. The emitted values must be immutable.
- */
- public void emitDirect(int taskId, List<Object> tuple, Object messageId) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple, messageId);
- }
-
- /**
- * Emits a tuple to the specified task on the specified output stream. This
- * output stream must have been declared as a direct stream, and the
- * specified task must use a direct grouping on this stream to receive the
- * message. The emitted values must be immutable.
- *
- * <p>
- * Because no message id is specified, Storm will not track this message so
- * ack and fail will never be called for this tuple.
- * </p>
- */
- public void emitDirect(int taskId, String streamId, List<Object> tuple) {
- emitDirect(taskId, streamId, tuple, null);
- }
-
- /**
- * Emits a tuple to the specified task on the default output stream. This
- * output stream must have been declared as a direct stream, and the
- * specified task must use a direct grouping on this stream to receive the
- * message. The emitted values must be immutable.
- *
- * <p>
- * Because no message id is specified, Storm will not track this message so
- * ack and fail will never be called for this tuple.
- * </p>
- */
- public void emitDirect(int taskId, List<Object> tuple) {
- emitDirect(taskId, tuple, null);
- }
-
- @Override
- public void reportError(Throwable error) {
- _delegate.reportError(error);
- }
-}
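To make the message-id contract documented above concrete, here is a minimal sketch of a spout that emits tracked tuples; the SingleMessageSpout name and the emitted value are hypothetical.

    package backtype.storm.spout;

    import java.util.Map;
    import java.util.UUID;

    import backtype.storm.task.TopologyContext;
    import backtype.storm.utils.Utils;

    // Hypothetical spout: emitting with a non-null message id makes Storm call
    // ack()/fail() for that id once the tuple tree completes or fails.
    public class SingleMessageSpout implements ISpout {
        private SpoutOutputCollector collector;

        @Override
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void nextTuple() {
            String msgId = UUID.randomUUID().toString();
            collector.emit(Utils.tuple("hello"), msgId); // tracked emit
        }

        @Override
        public void ack(Object msgId) {
            // The tuple tree rooted at msgId was fully processed.
        }

        @Override
        public void fail(Object msgId) {
            // Replay or log the failed message here.
        }

        @Override
        public void close() {}

        @Override
        public void activate() {}

        @Override
        public void deactivate() {}
    }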
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/state/IStateSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/state/IStateSpout.java b/jstorm-client/src/main/java/backtype/storm/state/IStateSpout.java
deleted file mode 100644
index 2c8b300..0000000
--- a/jstorm-client/src/main/java/backtype/storm/state/IStateSpout.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.state;
-
-import backtype.storm.task.TopologyContext;
-import java.io.Serializable;
-import java.util.Map;
-
-public interface IStateSpout extends Serializable {
- void open(Map conf, TopologyContext context);
-
- void close();
-
- void nextTuple(StateSpoutOutputCollector collector);
-
- void synchronize(SynchronizeOutputCollector collector);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/state/IStateSpoutOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/state/IStateSpoutOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/state/IStateSpoutOutputCollector.java
deleted file mode 100644
index d26ed6b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/state/IStateSpoutOutputCollector.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package backtype.storm.state;
-
-public interface IStateSpoutOutputCollector extends ISynchronizeOutputCollector {
- void remove(int streamId, Object id);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/state/ISubscribedState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/state/ISubscribedState.java b/jstorm-client/src/main/java/backtype/storm/state/ISubscribedState.java
deleted file mode 100644
index 4256a0a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/state/ISubscribedState.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.state;
-
-import backtype.storm.tuple.Tuple;
-
-public interface ISubscribedState {
- void set(Object id, Tuple tuple);
-
- void remove(Object id);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/state/ISynchronizeOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/state/ISynchronizeOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/state/ISynchronizeOutputCollector.java
deleted file mode 100644
index 97a8a8e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/state/ISynchronizeOutputCollector.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.state;
-
-import java.util.List;
-
-public interface ISynchronizeOutputCollector {
- void add(int streamId, Object id, List<Object> tuple);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/state/StateSpoutOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/state/StateSpoutOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/state/StateSpoutOutputCollector.java
deleted file mode 100644
index 3156e95..0000000
--- a/jstorm-client/src/main/java/backtype/storm/state/StateSpoutOutputCollector.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package backtype.storm.state;
-
-public class StateSpoutOutputCollector extends SynchronizeOutputCollector
- implements IStateSpoutOutputCollector {
-
- @Override
- public void remove(int streamId, Object id) {
- throw new UnsupportedOperationException("Not supported yet.");
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/state/SynchronizeOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/state/SynchronizeOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/state/SynchronizeOutputCollector.java
deleted file mode 100644
index 9474fa2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/state/SynchronizeOutputCollector.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package backtype.storm.state;
-
-import java.util.List;
-
-public class SynchronizeOutputCollector implements ISynchronizeOutputCollector {
-
- @Override
- public void add(int streamId, Object id, List<Object> tuple) {
- throw new UnsupportedOperationException("Not supported yet.");
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/GeneralTopologyContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/GeneralTopologyContext.java b/jstorm-client/src/main/java/backtype/storm/task/GeneralTopologyContext.java
deleted file mode 100644
index 4817fb4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/GeneralTopologyContext.java
+++ /dev/null
@@ -1,206 +0,0 @@
-package backtype.storm.task;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.json.simple.JSONAware;
-
-import backtype.storm.Config;
-import backtype.storm.Constants;
-import backtype.storm.generated.ComponentCommon;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.ThriftTopologyUtils;
-import backtype.storm.utils.Utils;
-
-public class GeneralTopologyContext implements JSONAware {
- private StormTopology _topology;
- private Map<Integer, String> _taskToComponent;
- private Map<String, List<Integer>> _componentToTasks;
- private Map<String, Map<String, Fields>> _componentToStreamToFields;
- private String _topologyId;
- protected Map _stormConf;
-
- // pass in componentToSortedTasks for the case of running tons of tasks in
- // single executor
- public GeneralTopologyContext(StormTopology topology, Map stormConf,
- Map<Integer, String> taskToComponent,
- Map<String, List<Integer>> componentToSortedTasks,
- Map<String, Map<String, Fields>> componentToStreamToFields,
- String topologyId) {
- _topology = topology;
- _stormConf = stormConf;
- _taskToComponent = taskToComponent;
- _topologyId = topologyId;
- _componentToTasks = componentToSortedTasks;
- _componentToStreamToFields = componentToStreamToFields;
- }
-
- /**
- * Gets the unique id assigned to this topology. The id is the storm name
- * with a unique nonce appended to it.
- *
- * @return the topology id
- */
- public String getTopologyId() {
- return _topologyId;
- }
-
- /**
- * Please use getTopologyId() instead.
- *
- * @return the topology id
- */
- @Deprecated
- public String getStormId() {
- return _topologyId;
- }
-
- /**
- * Gets the Thrift object representing the topology.
- *
- * @return the Thrift definition representing the topology
- */
- public StormTopology getRawTopology() {
- return _topology;
- }
-
- /**
- * Gets the component id for the specified task id. The component id maps to
- * a component id specified for a Spout or Bolt in the topology definition.
- *
- * @param taskId
- * the task id
- * @return the component id for the input task id
- */
- public String getComponentId(int taskId) {
- if (taskId == Constants.SYSTEM_TASK_ID) {
- return Constants.SYSTEM_COMPONENT_ID;
- } else {
- return _taskToComponent.get(taskId);
- }
- }
-
- /**
- * Gets the set of streams declared for the specified component.
- */
- public Set<String> getComponentStreams(String componentId) {
- return getComponentCommon(componentId).get_streams().keySet();
- }
-
- /**
- * Gets the task ids allocated for the given component id. The task ids are
- * always returned in ascending order.
- */
- public List<Integer> getComponentTasks(String componentId) {
- List<Integer> ret = _componentToTasks.get(componentId);
- if (ret == null)
- return new ArrayList<Integer>();
- else
- return new ArrayList<Integer>(ret);
- }
-
- /**
- * Gets the declared output fields for the specified component/stream.
- */
- public Fields getComponentOutputFields(String componentId, String streamId) {
- Fields ret = _componentToStreamToFields.get(componentId).get(streamId);
- if (ret == null) {
- throw new IllegalArgumentException(
- "No output fields defined for component:stream "
- + componentId + ":" + streamId);
- }
- return ret;
- }
-
- /**
- * Gets the declared output fields for the specified global stream id.
- */
- public Fields getComponentOutputFields(GlobalStreamId id) {
- return getComponentOutputFields(id.get_componentId(), id.get_streamId());
- }
-
- /**
- * Gets the declared inputs to the specified component.
- *
- * @return A map from subscribed component/stream to the grouping subscribed
- * with.
- */
- public Map<GlobalStreamId, Grouping> getSources(String componentId) {
- return getComponentCommon(componentId).get_inputs();
- }
-
- /**
- * Gets information about who is consuming the outputs of the specified
- * component, and how.
- *
- * @return Map from stream id to component id to the Grouping used.
- */
- public Map<String, Map<String, Grouping>> getTargets(String componentId) {
- Map<String, Map<String, Grouping>> ret = new HashMap<String, Map<String, Grouping>>();
- for (String otherComponentId : getComponentIds()) {
- Map<GlobalStreamId, Grouping> inputs = getComponentCommon(
- otherComponentId).get_inputs();
- for (GlobalStreamId id : inputs.keySet()) {
- if (id.get_componentId().equals(componentId)) {
- Map<String, Grouping> curr = ret.get(id.get_streamId());
- if (curr == null)
- curr = new HashMap<String, Grouping>();
- curr.put(otherComponentId, inputs.get(id));
- ret.put(id.get_streamId(), curr);
- }
- }
- }
- return ret;
- }
-
- @Override
- public String toJSONString() {
- Map obj = new HashMap();
- obj.put("task->component", _taskToComponent);
- // TODO: jsonify StormTopology
- // at the minimum should send source info
- return Utils.to_json(obj);
- }
-
- /**
- * Gets a map from task id to component id.
- */
- public Map<Integer, String> getTaskToComponent() {
- return _taskToComponent;
- }
-
- /**
- * Gets a list of all component ids in this topology
- */
- public Set<String> getComponentIds() {
- return ThriftTopologyUtils.getComponentIds(getRawTopology());
- }
-
- public ComponentCommon getComponentCommon(String componentId) {
- return ThriftTopologyUtils.getComponentCommon(getRawTopology(),
- componentId);
- }
-
- public int maxTopologyMessageTimeout() {
- Integer max = Utils.getInt(_stormConf
- .get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS));
- for (String spout : getRawTopology().get_spouts().keySet()) {
- ComponentCommon common = getComponentCommon(spout);
- String jsonConf = common.get_json_conf();
- if (jsonConf != null) {
- Map conf = (Map) Utils.from_json(jsonConf);
- Object comp = conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS);
- if (comp != null) {
- max = Math.max(Utils.getInt(comp), max);
- }
- }
- }
- return max;
- }
-}
\ No newline at end of file
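A brief sketch of reading topology wiring through the context above, for instance from a component's prepare/open method; the "wordSpout" component id is made up.

    import java.util.List;
    import java.util.Map;

    import backtype.storm.generated.Grouping;
    import backtype.storm.task.GeneralTopologyContext;

    public class TopologyWiringDump {
        // Hypothetical helper: list who consumes "wordSpout" output, per stream,
        // along with the task ids of each consumer.
        public static void dump(GeneralTopologyContext context) {
            Map<String, Map<String, Grouping>> targets = context.getTargets("wordSpout");
            for (Map.Entry<String, Map<String, Grouping>> byStream : targets.entrySet()) {
                String streamId = byStream.getKey();
                for (String consumer : byStream.getValue().keySet()) {
                    List<Integer> tasks = context.getComponentTasks(consumer);
                    System.out.println(streamId + " -> " + consumer + " tasks=" + tasks);
                }
            }
        }
    }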
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/IBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/IBolt.java b/jstorm-client/src/main/java/backtype/storm/task/IBolt.java
deleted file mode 100644
index bfffa14..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/IBolt.java
+++ /dev/null
@@ -1,98 +0,0 @@
-package backtype.storm.task;
-
-import backtype.storm.tuple.Tuple;
-import java.util.Map;
-import java.io.Serializable;
-
-/**
- * An IBolt represents a component that takes tuples as input and produces
- * tuples as output. An IBolt can do everything from filtering to joining to
- * functions to aggregations. It does not have to process a tuple immediately
- * and may hold onto tuples to process later.
- *
- * <p>
- * A bolt's lifecycle is as follows:
- * </p>
- *
- * <p>
- * IBolt object created on client machine. The IBolt is serialized into the
- * topology (using Java serialization) and submitted to the master machine of
- * the cluster (Nimbus). Nimbus then launches workers which deserialize the
- * object, call prepare on it, and then start processing tuples.
- * </p>
- *
- * <p>
- * If you want to parameterize an IBolt, you should set the parameters through
- * its constructor and save the parameterization state as instance variables
- * (which will then get serialized and shipped to every task executing this bolt
- * across the cluster).
- * </p>
- *
- * <p>
- * When defining bolts in Java, you should use the IRichBolt interface which
- * adds necessary methods for using the Java TopologyBuilder API.
- * </p>
- */
-public interface IBolt extends Serializable {
- /**
- * Called when a task for this component is initialized within a worker on
- * the cluster. It provides the bolt with the environment in which the bolt
- * executes.
- *
- * <p>
- * This includes the:
- * </p>
- *
- * @param stormConf
- * The Storm configuration for this bolt. This is the
- * configuration provided to the topology merged in with cluster
- * configuration on this machine.
- * @param context
- * This object can be used to get information about this task's
- * place within the topology, including the task id and component
- * id of this task, input and output information, etc.
- * @param collector
- * The collector is used to emit tuples from this bolt. Tuples
- * can be emitted at any time, including the prepare and cleanup
- * methods. The collector is thread-safe and should be saved as
- * an instance variable of this bolt object.
- */
- void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector);
-
- /**
- * Process a single tuple of input. The Tuple object contains metadata on it
- * about which component/stream/task it came from. The values of the Tuple
- * can be accessed using Tuple#getValue. The IBolt does not have to process
- * the Tuple immediately. It is perfectly fine to hang onto a tuple and
- * process it later (for instance, to do an aggregation or join).
- *
- * <p>
- * Tuples should be emitted using the OutputCollector provided through the
- * prepare method. It is required that all input tuples are acked or failed
- * at some point using the OutputCollector. Otherwise, Storm will be unable
- * to determine when tuples coming off the spouts have been completed.
- * </p>
- *
- * <p>
- * For the common case of acking an input tuple at the end of the execute
- * method, see IBasicBolt which automates this.
- * </p>
- *
- * @param input
- * The input tuple to be processed.
- */
- void execute(Tuple input);
-
- /**
- * Called when an IBolt is going to be shut down. There is no guarantee that
- * cleanup will be called, because the supervisor kill -9's worker processes
- * on the cluster.
- *
- * <p>
- * The one context where cleanup is guaranteed to be called is when a
- * topology is killed when running Storm in local mode.
- * </p>
- */
- void cleanup();
-}
\ No newline at end of file
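To illustrate the lifecycle described above (prepare, execute, cleanup) and the requirement that every input tuple be acked or failed, a minimal sketch follows; the PrinterBolt name is hypothetical.

    import java.util.Map;

    import backtype.storm.task.IBolt;
    import backtype.storm.task.OutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.tuple.Tuple;

    // Hypothetical bolt: save the collector in prepare(), process each tuple in
    // execute(), and ack every input so Storm can complete the spout tuples.
    public class PrinterBolt implements IBolt {
        private OutputCollector collector;

        @Override
        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void execute(Tuple input) {
            System.out.println(input.getValues());
            collector.ack(input); // required; otherwise the spout tuple never completes
        }

        @Override
        public void cleanup() {
            // May never run on a real cluster, since workers can be killed with kill -9.
        }
    }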
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/IErrorReporter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/IErrorReporter.java b/jstorm-client/src/main/java/backtype/storm/task/IErrorReporter.java
deleted file mode 100644
index ae04710..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/IErrorReporter.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package backtype.storm.task;
-
-public interface IErrorReporter {
- void reportError(Throwable error);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/IMetricsContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/IMetricsContext.java b/jstorm-client/src/main/java/backtype/storm/task/IMetricsContext.java
deleted file mode 100644
index d4ace69..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/IMetricsContext.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package backtype.storm.task;
-
-import backtype.storm.metric.api.CombinedMetric;
-import backtype.storm.metric.api.ICombiner;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.metric.api.IReducer;
-import backtype.storm.metric.api.ReducedMetric;
-
-public interface IMetricsContext {
- <T extends IMetric> T registerMetric(String name, T metric,
- int timeBucketSizeInSecs);
-
- ReducedMetric registerMetric(String name, IReducer reducer,
- int timeBucketSizeInSecs);
-
- CombinedMetric registerMetric(String name, ICombiner combiner,
- int timeBucketSizeInSecs);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/IOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/IOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/task/IOutputCollector.java
deleted file mode 100644
index dcf2217..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/IOutputCollector.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package backtype.storm.task;
-
-import backtype.storm.tuple.Tuple;
-import java.util.Collection;
-import java.util.List;
-
-public interface IOutputCollector extends IErrorReporter {
- /**
- * Returns the task ids that received the tuples.
- */
- List<Integer> emit(String streamId, Collection<Tuple> anchors,
- List<Object> tuple);
-
- void emitDirect(int taskId, String streamId, Collection<Tuple> anchors,
- List<Object> tuple);
-
- void ack(Tuple input);
-
- void fail(Tuple input);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/task/OutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/task/OutputCollector.java b/jstorm-client/src/main/java/backtype/storm/task/OutputCollector.java
deleted file mode 100644
index 9cee3a8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/task/OutputCollector.java
+++ /dev/null
@@ -1,245 +0,0 @@
-package backtype.storm.task;
-
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.List;
-
-/**
- * This output collector exposes the API for emitting tuples from an IRichBolt.
- * This is the core API for emitting tuples. For a simpler API, and a more
- * restricted form of stream processing, see IBasicBolt and
- * BasicOutputCollector.
- */
-public class OutputCollector implements IOutputCollector {
- private IOutputCollector _delegate;
-
- public OutputCollector(IOutputCollector delegate) {
- _delegate = delegate;
- }
-
- /**
- * Emits a new tuple to a specific stream with a single anchor. The emitted
- * values must be immutable.
- *
- * @param streamId
- * the stream to emit to
- * @param anchor
- * the tuple to anchor to
- * @param tuple
- * the new output tuple from this bolt
- * @return the list of task ids that this new tuple was sent to
- */
- public List<Integer> emit(String streamId, Tuple anchor, List<Object> tuple) {
- return emit(streamId, Arrays.asList(anchor), tuple);
- }
-
- /**
- * Emits a new unanchored tuple to the specified stream. Because it's
- * unanchored, if a failure happens downstream, this new tuple won't affect
- * whether any spout tuples are considered failed or not. The emitted values
- * must be immutable.
- *
- * @param streamId
- * the stream to emit to
- * @param tuple
- * the new output tuple from this bolt
- * @return the list of task ids that this new tuple was sent to
- */
- public List<Integer> emit(String streamId, List<Object> tuple) {
- return emit(streamId, (List) null, tuple);
- }
-
- /**
- * Emits a new tuple to the default stream anchored on a group of input
- * tuples. The emitted values must be immutable.
- *
- * @param anchors
- * the tuples to anchor to
- * @param tuple
- * the new output tuple from this bolt
- * @return the list of task ids that this new tuple was sent to
- */
- public List<Integer> emit(Collection<Tuple> anchors, List<Object> tuple) {
- return emit(Utils.DEFAULT_STREAM_ID, anchors, tuple);
- }
-
- /**
- * Emits a new tuple to the default stream anchored on a single tuple. The
- * emitted values must be immutable.
- *
- * @param anchor
- * the tuple to anchor to
- * @param tuple
- * the new output tuple from this bolt
- * @return the list of task ids that this new tuple was sent to
- */
- public List<Integer> emit(Tuple anchor, List<Object> tuple) {
- return emit(Utils.DEFAULT_STREAM_ID, anchor, tuple);
- }
-
- /**
- * Emits a new unanchored tuple to the default stream. Because it's
- * unanchored, if a failure happens downstream, this new tuple won't affect
- * whether any spout tuples are considered failed or not. The emitted values
- * must be immutable.
- *
- * @param tuple
- * the new output tuple from this bolt
- * @return the list of task ids that this new tuple was sent to
- */
- public List<Integer> emit(List<Object> tuple) {
- return emit(Utils.DEFAULT_STREAM_ID, tuple);
- }
-
- /**
- * Emits a tuple directly to the specified task id on the specified stream.
- * If the target bolt does not subscribe to this bolt using a direct
- * grouping, the tuple will not be sent. If the specified output stream is
- * not declared as direct, or the target bolt subscribes with a non-direct
- * grouping, an error will occur at runtime. The emitted values must be
- * immutable.
- *
- * @param taskId
- * the taskId to send the new tuple to
- * @param streamId
- * the stream to send the tuple on. It must be declared as a
- * direct stream in the topology definition.
- * @param anchor
- * the tuple to anchor to
- * @param tuple
- * the new output tuple from this bolt
- */
- public void emitDirect(int taskId, String streamId, Tuple anchor,
- List<Object> tuple) {
- emitDirect(taskId, streamId, Arrays.asList(anchor), tuple);
- }
-
- /**
- * Emits a tuple directly to the specified task id on the specified stream.
- * If the target bolt does not subscribe to this bolt using a direct
- * grouping, the tuple will not be sent. If the specified output stream is
- * not declared as direct, or the target bolt subscribes with a non-direct
- * grouping, an error will occur at runtime. Note that this method does not
- * use anchors, so downstream failures won't affect the failure status of
- * any spout tuples. The emitted values must be immutable.
- *
- * @param taskId
- * the taskId to send the new tuple to
- * @param streamId
- * the stream to send the tuple on. It must be declared as a
- * direct stream in the topology definition.
- * @param tuple
- * the new output tuple from this bolt
- */
- public void emitDirect(int taskId, String streamId, List<Object> tuple) {
- emitDirect(taskId, streamId, (List) null, tuple);
- }
-
- /**
- * Emits a tuple directly to the specified task id on the default stream. If
- * the target bolt does not subscribe to this bolt using a direct grouping,
- * the tuple will not be sent. If the specified output stream is not
- * declared as direct, or the target bolt subscribes with a non-direct
- * grouping, an error will occur at runtime. The emitted values must be
- * immutable.
- *
- * <p>
- * The default stream must be declared as direct in the topology definition.
- * See OutputDeclarer#declare for how this is done when defining topologies
- * in Java.
- * </p>
- *
- * @param taskId
- * the taskId to send the new tuple to
- * @param anchors
- * the tuples to anchor to
- * @param tuple
- * the new output tuple from this bolt
- */
- public void emitDirect(int taskId, Collection<Tuple> anchors,
- List<Object> tuple) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, anchors, tuple);
- }
-
- /**
- * Emits a tuple directly to the specified task id on the default stream. If
- * the target bolt does not subscribe to this bolt using a direct grouping,
- * the tuple will not be sent. If the specified output stream is not
- * declared as direct, or the target bolt subscribes with a non-direct
- * grouping, an error will occur at runtime. The emitted values must be
- * immutable.
- *
- * <p>
- * The default stream must be declared as direct in the topology definition.
- * See OutputDeclarer#declare for how this is done when defining topologies
- * in Java.
- * </p>
- *
- * @param taskId
- * the taskId to send the new tuple to
- * @param anchor
- * the tuple to anchor to
- * @param tuple
- * the new output tuple from this bolt
- */
- public void emitDirect(int taskId, Tuple anchor, List<Object> tuple) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, anchor, tuple);
- }
-
- /**
- * Emits a tuple directly to the specified task id on the default stream. If
- * the target bolt does not subscribe to this bolt using a direct grouping,
- * the tuple will not be sent. If the specified output stream is not
- * declared as direct, or the target bolt subscribes with a non-direct
- * grouping, an error will occur at runtime. The emitted values must be
- * immutable.
- *
- * <p>
- * The default stream must be declared as direct in the topology definition.
- * See OutputDeclarer#declare for how this is done when defining topologies
- * in Java.
- * </p>
- *
- * <p>
- * Note that this method does not use anchors, so downstream failures won't
- * affect the failure status of any spout tuples.
- * </p>
- *
- * @param taskId
- * the taskId to send the new tuple to
- * @param tuple
- * the new output tuple from this bolt
- */
- public void emitDirect(int taskId, List<Object> tuple) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple);
- }
-
- @Override
- public List<Integer> emit(String streamId, Collection<Tuple> anchors,
- List<Object> tuple) {
- return _delegate.emit(streamId, anchors, tuple);
- }
-
- @Override
- public void emitDirect(int taskId, String streamId,
- Collection<Tuple> anchors, List<Object> tuple) {
- _delegate.emitDirect(taskId, streamId, anchors, tuple);
- }
-
- @Override
- public void ack(Tuple input) {
- _delegate.ack(input);
- }
-
- @Override
- public void fail(Tuple input) {
- _delegate.fail(input);
- }
-
- @Override
- public void reportError(Throwable error) {
- _delegate.reportError(error);
- }
-}
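A short sketch of the anchoring behavior documented above: the output tuple is anchored to its input, so a downstream failure fails the original spout tuple and allows replay. The UppercaseBolt name is made up, and it assumes a single string field in the input tuple.

    import java.util.Map;

    import backtype.storm.task.IBolt;
    import backtype.storm.task.OutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.utils.Utils;

    // Hypothetical bolt: anchored emit plus ack of the input tuple.
    public class UppercaseBolt implements IBolt {
        private OutputCollector collector;

        @Override
        public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void execute(Tuple input) {
            String word = input.getString(0);
            collector.emit(input, Utils.tuple(word.toUpperCase())); // anchored to input
            collector.ack(input);
        }

        @Override
        public void cleanup() {}
    }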
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/Config.java b/jstorm-client/src/main/java/backtype/storm/Config.java
deleted file mode 100644
index baebbe8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/Config.java
+++ /dev/null
@@ -1,1382 +0,0 @@
-package backtype.storm;
-
-import backtype.storm.ConfigValidation;
-import backtype.storm.serialization.IKryoDecorator;
-import backtype.storm.serialization.IKryoFactory;
-
-import com.esotericsoftware.kryo.Serializer;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-/**
- * Topology configs are specified as a plain old map. This class provides a
- * convenient way to create a topology config map by providing setter methods
- * for all the configs that can be set. It also makes it easier to do things
- * like add serializations.
- *
- * <p>
- * This class also provides constants for all the configurations possible on a
- * Storm cluster and Storm topology. Each constant is paired with a schema that
- * defines the validity criterion of the corresponding field. Default values for
- * these configs can be found in defaults.yaml.
- * </p>
- *
- * <p>
- * Note that you may put other configurations in any of the configs. Storm will
- * ignore anything it doesn't recognize, but your topologies are free to make
- * use of them by reading them in the prepare method of Bolts or the open method
- * of Spouts.
- * </p>
- */
-public class Config extends HashMap<String, Object> {
- /**
- * The transporter for communication among Storm tasks
- */
- public static final String STORM_MESSAGING_TRANSPORT = "storm.messaging.transport";
- public static final Object STORM_MESSAGING_TRANSPORT_SCHEMA = String.class;
-
- /**
- * Netty based messaging: The buffer size for send/recv buffer
- */
- public static final String STORM_MESSAGING_NETTY_BUFFER_SIZE = "storm.messaging.netty.buffer_size";
- public static final Object STORM_MESSAGING_NETTY_BUFFER_SIZE_SCHEMA = Number.class;
-
- /**
- * Netty based messaging: The max # of retries that a peer will perform when
- * a remote is not accessible
- */
- public static final String STORM_MESSAGING_NETTY_MAX_RETRIES = "storm.messaging.netty.max_retries";
- public static final Object STORM_MESSAGING_NETTY_MAX_RETRIES_SCHEMA = Number.class;
-
- /**
- * Netty based messaging: The min # of milliseconds that a peer will wait.
- */
- public static final String STORM_MESSAGING_NETTY_MIN_SLEEP_MS = "storm.messaging.netty.min_wait_ms";
- public static final Object STORM_MESSAGING_NETTY_MIN_SLEEP_MS_SCHEMA = Number.class;
-
- /**
- * Netty based messaging: The max # of milliseconds that a peer will wait.
- */
- public static final String STORM_MESSAGING_NETTY_MAX_SLEEP_MS = "storm.messaging.netty.max_wait_ms";
- public static final Object STORM_MESSAGING_NETTY_MAX_SLEEP_MS_SCHEMA = Number.class;
-
- /**
- * Netty based messaging: The # of worker threads for the server.
- */
- public static final String STORM_MESSAGING_NETTY_SERVER_WORKER_THREADS = "storm.messaging.netty.server_worker_threads";
- public static final Object STORM_MESSAGING_NETTY_SERVER_WORKER_THREADS_SCHEMA = Number.class;
-
- /**
- * Netty based messaging: The # of worker threads for the client.
- */
- public static final String STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS = "storm.messaging.netty.client_worker_threads";
- public static final Object STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS_SCHEMA = Number.class;
-
- /**
- * If the Netty messaging layer is busy, the Netty client will try to batch messages as much as possible, up to STORM_NETTY_MESSAGE_BATCH_SIZE bytes
- */
- public static final String STORM_NETTY_MESSAGE_BATCH_SIZE = "storm.messaging.netty.transfer.batch.size";
- public static final Object STORM_NETTY_MESSAGE_BATCH_SIZE_SCHEMA = Number.class;
-
- /**
- * At this interval we check whether the Netty channel is writable and, if so, try to write pending messages
- */
- public static final String STORM_NETTY_FLUSH_CHECK_INTERVAL_MS = "storm.messaging.netty.flush.check.interval.ms";
- public static final Object STORM_NETTY_FLUSH_CHECK_INTERVAL_MS_SCHEMA = Number.class;
- /**
- * Netty based messaging: Is authentication required for Netty messaging from client worker process to server worker process.
- */
- public static final String STORM_MESSAGING_NETTY_AUTHENTICATION = "storm.messaging.netty.authentication";
- public static final Object STORM_MESSAGING_NETTY_AUTHENTICATION_SCHEMA = Boolean.class;
-
- /**
- * The delegate for serializing metadata, should be used for serialized objects stored in zookeeper and on disk.
- * This is NOT used for compressing serialized tuples sent between topologies.
- */
- public static final String STORM_META_SERIALIZATION_DELEGATE = "storm.meta.serialization.delegate";
- public static final Object STORM_META_SERIALIZATION_DELEGATE_SCHEMA = String.class;
-
- /**
- * A list of hosts of ZooKeeper servers used to manage the cluster.
- */
- public static final String STORM_ZOOKEEPER_SERVERS = "storm.zookeeper.servers";
- public static final Object STORM_ZOOKEEPER_SERVERS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * The port Storm will use to connect to each of the ZooKeeper servers.
- */
- public static final String STORM_ZOOKEEPER_PORT = "storm.zookeeper.port";
- public static final Object STORM_ZOOKEEPER_PORT_SCHEMA = Number.class;
-
- /**
- * A directory on the local filesystem used by Storm for any local
- * filesystem usage it needs. The directory must exist and the Storm daemons
- * must have permission to read/write from this location.
- */
- public static final String STORM_LOCAL_DIR = "storm.local.dir";
- public static final Object STORM_LOCAL_DIR_SCHEMA = String.class;
-
- /**
- * A global task scheduler used to assign topologies' tasks to supervisors'
- * workers.
- *
- * If this is not set, a default system scheduler will be used.
- */
- public static final String STORM_SCHEDULER = "storm.scheduler";
- public static final Object STORM_SCHEDULER_SCHEMA = String.class;
-
- /**
- * The mode this Storm cluster is running in. Either "distributed" or
- * "local".
- */
- public static final String STORM_CLUSTER_MODE = "storm.cluster.mode";
- public static final Object STORM_CLUSTER_MODE_SCHEMA = String.class;
-
- /**
- * The hostname the supervisors/workers should report to nimbus. If unset,
- * Storm will get the hostname to report by calling
- * <code>InetAddress.getLocalHost().getCanonicalHostName()</code>.
- *
- * You should set this config when you don't have DNS that
- * supervisors/workers can use to find each other based on the hostname
- * obtained from calls to
- * <code>InetAddress.getLocalHost().getCanonicalHostName()</code>.
- */
- public static final String STORM_LOCAL_HOSTNAME = "storm.local.hostname";
- public static final Object STORM_LOCAL_HOSTNAME_SCHEMA = String.class;
-
- /**
- * The plugin that will convert a principal to a local user.
- */
- public static final String STORM_PRINCIPAL_TO_LOCAL_PLUGIN = "storm.principal.tolocal";
- public static final Object STORM_PRINCIPAL_TO_LOCAL_PLUGIN_SCHEMA = String.class;
-
- /**
- * The plugin that will provide user groups service
- */
- public static final String STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN = "storm.group.mapping.service";
- public static final Object STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN_SCHEMA = String.class;
-
- /**
- * Max no. of seconds the group mapping service will cache user groups
- */
- public static final String STORM_GROUP_MAPPING_SERVICE_CACHE_DURATION_SECS = "storm.group.mapping.service.cache.duration.secs";
- public static final Object STORM_GROUP_MAPPING_SERVICE_CACHE_DURATION_SECS_SCHEMA = Number.class;
-
- /**
- * The transport plug-in for Thrift client/server communication
- */
- public static final String STORM_THRIFT_TRANSPORT_PLUGIN = "storm.thrift.transport";
- public static final Object STORM_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
-
- /**
- * The serializer class for ListDelegate (tuple payload). The default
- * serializer will be ListDelegateSerializer
- */
- public static final String TOPOLOGY_TUPLE_SERIALIZER = "topology.tuple.serializer";
- public static final Object TOPOLOGY_TUPLE_SERIALIZER_SCHEMA = String.class;
-
- /**
- * Try to serialize all tuples, even for local transfers. This should only be used
- * for testing, as a sanity check that all of your tuples are set up properly.
- */
- public static final String TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE = "topology.testing.always.try.serialize";
- public static final Object TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE_SCHEMA = Boolean.class;
-
- /**
- * Whether or not to use ZeroMQ for messaging in local mode. If this is set
- * to false, then Storm will use a pure-Java messaging system. The purpose
- * of this flag is to make it easy to run Storm in local mode by eliminating
- * the need for native dependencies, which can be difficult to install.
- *
- * Defaults to false.
- */
- public static final String STORM_LOCAL_MODE_ZMQ = "storm.local.mode.zmq";
- public static final Object STORM_LOCAL_MODE_ZMQ_SCHEMA = Boolean.class;
-
- /**
- * The root location at which Storm stores data in ZooKeeper.
- */
- public static final String STORM_ZOOKEEPER_ROOT = "storm.zookeeper.root";
- public static final Object STORM_ZOOKEEPER_ROOT_SCHEMA = String.class;
-
- /**
- * The session timeout for clients to ZooKeeper.
- */
- public static final String STORM_ZOOKEEPER_SESSION_TIMEOUT = "storm.zookeeper.session.timeout";
- public static final Object STORM_ZOOKEEPER_SESSION_TIMEOUT_SCHEMA = Number.class;
-
- /**
- * The connection timeout for clients to ZooKeeper.
- */
- public static final String STORM_ZOOKEEPER_CONNECTION_TIMEOUT = "storm.zookeeper.connection.timeout";
- public static final Object STORM_ZOOKEEPER_CONNECTION_TIMEOUT_SCHEMA = Number.class;
-
- /**
- * The number of times to retry a Zookeeper operation.
- */
- public static final String STORM_ZOOKEEPER_RETRY_TIMES = "storm.zookeeper.retry.times";
- public static final Object STORM_ZOOKEEPER_RETRY_TIMES_SCHEMA = Number.class;
-
- /**
- * The interval between retries of a Zookeeper operation.
- */
- public static final String STORM_ZOOKEEPER_RETRY_INTERVAL = "storm.zookeeper.retry.interval";
- public static final Object STORM_ZOOKEEPER_RETRY_INTERVAL_SCHEMA = Number.class;
-
- /**
- * The ceiling of the interval between retries of a Zookeeper operation.
- */
- public static final String STORM_ZOOKEEPER_RETRY_INTERVAL_CEILING = "storm.zookeeper.retry.intervalceiling.millis";
- public static final Object STORM_ZOOKEEPER_RETRY_INTERVAL_CEILING_SCHEMA = Number.class;
-
- /**
- * The Zookeeper authentication scheme to use, e.g. "digest". Defaults to no
- * authentication.
- */
- public static final String STORM_ZOOKEEPER_AUTH_SCHEME = "storm.zookeeper.auth.scheme";
- public static final Object STORM_ZOOKEEPER_AUTH_SCHEME_SCHEMA = String.class;
-
- /**
- * A string representing the payload for Zookeeper authentication. It gets
- * serialized using UTF-8 encoding during authentication.
- */
- public static final String STORM_ZOOKEEPER_AUTH_PAYLOAD = "storm.zookeeper.auth.payload";
- public static final Object STORM_ZOOKEEPER_AUTH_PAYLOAD_SCHEMA = String.class;
-
- /**
- * The topology Zookeeper authentication scheme to use, e.g. "digest". Defaults to no authentication.
- */
- public static final String STORM_ZOOKEEPER_TOPOLOGY_AUTH_SCHEME="storm.zookeeper.topology.auth.scheme";
- public static final Object STORM_ZOOKEEPER_TOPOLOGY_AUTH_SCHEME_SCHEMA = String.class;
-
- /**
- * A string representing the payload for topology Zookeeper authentication. It gets serialized using UTF-8 encoding during authentication.
- */
- public static final String STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD="storm.zookeeper.topology.auth.payload";
- public static final Object STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD_SCHEMA = String.class;
-
- /**
- * The id assigned to a running topology. The id is the storm name with a
- * unique nonce appended.
- */
- public static final String TOPOLOGY_ID = "topology.id";
- public static final Object TOPOLOGY_ID_SCHEMA = String.class;
-
- /**
- * The number of times to retry a Nimbus operation.
- */
- public static final String STORM_NIMBUS_RETRY_TIMES="storm.nimbus.retry.times";
- public static final Object STORM_NIMBUS_RETRY_TIMES_SCHEMA = Number.class;
-
- /**
- * The starting interval between exponential backoff retries of a Nimbus operation.
- */
- public static final String STORM_NIMBUS_RETRY_INTERVAL="storm.nimbus.retry.interval.millis";
- public static final Object STORM_NIMBUS_RETRY_INTERVAL_SCHEMA = Number.class;
-
- /**
- * The ceiling of the interval between retries of a client connect to Nimbus operation.
- */
- public static final String STORM_NIMBUS_RETRY_INTERVAL_CEILING="storm.nimbus.retry.intervalceiling.millis";
- public static final Object STORM_NIMBUS_RETRY_INTERVAL_CEILING_SCHEMA = Number.class;
- /**
- * The host that the master server is running on.
- */
- public static final String NIMBUS_HOST = "nimbus.host";
- public static final Object NIMBUS_HOST_SCHEMA = String.class;
-
- /**
- * The Nimbus transport plug-in for Thrift client/server communication
- */
- public static final String NIMBUS_THRIFT_TRANSPORT_PLUGIN = "nimbus.thrift.transport";
- public static final Object NIMBUS_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
-
- /**
- * Which port the Thrift interface of Nimbus should run on. Clients should
- * connect to this port to upload jars and submit topologies.
- */
- public static final String NIMBUS_THRIFT_PORT = "nimbus.thrift.port";
- public static final Object NIMBUS_THRIFT_PORT_SCHEMA = Number.class;
-
- /**
- * The number of threads that should be used by the nimbus thrift server.
- */
- public static final String NIMBUS_THRIFT_THREADS = "nimbus.thrift.threads";
- public static final Object NIMBUS_THRIFT_THREADS_SCHEMA = Number.class;
-
- /**
- * A list of users that are cluster admins and can run any command. To use this, set
- * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
- */
- public static final String NIMBUS_ADMINS = "nimbus.admins";
- public static final Object NIMBUS_ADMINS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * A list of users that run the supervisors and should be authorized to interact with
- * nimbus as a supervisor would. To use this, set
- * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
- */
- public static final String NIMBUS_SUPERVISOR_USERS = "nimbus.supervisor.users";
- public static final Object NIMBUS_SUPERVISOR_USERS_SCHEMA = ConfigValidation.StringsValidator;
- /**
- * The maximum buffer size thrift should use when reading messages.
- */
- public static final String NIMBUS_THRIFT_MAX_BUFFER_SIZE = "nimbus.thrift.max_buffer_size";
- public static final Object NIMBUS_THRIFT_MAX_BUFFER_SIZE_SCHEMA = Number.class;
-
- /**
- * This parameter is used by the storm-deploy project to configure the jvm
- * options for the nimbus daemon.
- */
- public static final String NIMBUS_CHILDOPTS = "nimbus.childopts";
- public static final Object NIMBUS_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * How long a task can go without heartbeating before nimbus considers it
- * dead and reassigns it to another location.
- */
- public static final String NIMBUS_TASK_TIMEOUT_SECS = "nimbus.task.timeout.secs";
- public static final Object NIMBUS_TASK_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * How often nimbus should wake up to check heartbeats and do reassignments.
- * Note that if a machine ever goes down Nimbus will immediately wake up and
- * take action. This parameter is for checking for failures when there's no
- * explicit event like that occurring.
- */
- public static final String NIMBUS_MONITOR_FREQ_SECS = "nimbus.monitor.freq.secs";
- public static final Object NIMBUS_MONITOR_FREQ_SECS_SCHEMA = Number.class;
-
- /**
- * How often nimbus should wake the cleanup thread to clean the inbox.
- *
- * @see NIMBUS_INBOX_JAR_EXPIRATION_SECS
- */
- public static final String NIMBUS_CLEANUP_INBOX_FREQ_SECS = "nimbus.cleanup.inbox.freq.secs";
- public static final Object NIMBUS_CLEANUP_INBOX_FREQ_SECS_SCHEMA = Number.class;
-
- /**
- * The length of time a jar file lives in the inbox before being deleted by
- * the cleanup thread.
- *
- * Probably keep this value greater than or equal to
- * NIMBUS_CLEANUP_INBOX_FREQ_SECS. Note that the time it takes to
- * delete an inbox jar file is going to be somewhat more than
- * NIMBUS_INBOX_JAR_EXPIRATION_SECS (depending on how often
- * NIMBUS_CLEANUP_INBOX_FREQ_SECS is set).
- *
- * @see NIMBUS_CLEANUP_INBOX_FREQ_SECS
- */
- public static final String NIMBUS_INBOX_JAR_EXPIRATION_SECS = "nimbus.inbox.jar.expiration.secs";
- public static final Object NIMBUS_INBOX_JAR_EXPIRATION_SECS_SCHEMA = Number.class;
-
- /**
- * How long a supervisor can go without heartbeating before nimbus
- * considers it dead and stops assigning new work to it.
- */
- public static final String NIMBUS_SUPERVISOR_TIMEOUT_SECS = "nimbus.supervisor.timeout.secs";
- public static final Object NIMBUS_SUPERVISOR_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * A special timeout used when a task is initially launched. During launch,
- * this is the timeout used until the first heartbeat, overriding
- * nimbus.task.timeout.secs.
- *
- * <p>
- * A separate timeout exists for launch because there can be quite a bit of
- * overhead to launching new JVM's and configuring them.
- * </p>
- */
- public static final String NIMBUS_TASK_LAUNCH_SECS = "nimbus.task.launch.secs";
- public static final Object NIMBUS_TASK_LAUNCH_SECS_SCHEMA = Number.class;
-
- /**
- * Whether or not nimbus should reassign tasks if it detects that a task
- * goes down. Defaults to true, and it's not recommended to change this
- * value.
- */
- public static final String NIMBUS_REASSIGN = "nimbus.reassign";
- public static final Object NIMBUS_REASSIGN_SCHEMA = Boolean.class;
-
- /**
- * During upload/download with the master, how long an upload or download
- * connection can be idle before nimbus considers it dead and drops the
- * connection.
- */
- public static final String NIMBUS_FILE_COPY_EXPIRATION_SECS = "nimbus.file.copy.expiration.secs";
- public static final Object NIMBUS_FILE_COPY_EXPIRATION_SECS_SCHEMA = Number.class;
-
- /**
- * A custom class that implements ITopologyValidator that is run whenever a
- * topology is submitted. Can be used to provide business-specific logic for
- * whether topologies are allowed to run or not.
- */
- public static final String NIMBUS_TOPOLOGY_VALIDATOR = "nimbus.topology.validator";
- public static final Object NIMBUS_TOPOLOGY_VALIDATOR_SCHEMA = String.class;
-
- /**
- * Class name for authorization plugin for Nimbus
- */
- public static final String NIMBUS_AUTHORIZER = "nimbus.authorizer";
- public static final Object NIMBUS_AUTHORIZER_SCHEMA = String.class;
-
- /**
- * How often nimbus should wake up to renew credentials if needed.
- */
- public static final String NIMBUS_CREDENTIAL_RENEW_FREQ_SECS = "nimbus.credential.renewers.freq.secs";
- public static final Object NIMBUS_CREDENTIAL_RENEW_FREQ_SECS_SCHEMA = Number.class;
-
- /**
- * A list of credential renewers that nimbus should load.
- */
- public static final String NIMBUS_CREDENTIAL_RENEWERS = "nimbus.credential.renewers.classes";
- public static final Object NIMBUS_CREDENTIAL_RENEWERS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * A list of plugins that nimbus should load during submit topology to populate
- * credentials on user's behalf.
- */
- public static final String NIMBUS_AUTO_CRED_PLUGINS = "nimbus.autocredential.plugins.classes";
- public static final Object NIMBUS_AUTO_CRED_PLUGINS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * Storm UI binds to this host/interface.
- */
- public static final String UI_HOST = "ui.host";
- public static final Object UI_HOST_SCHEMA = String.class;
- /**
- * Storm UI binds to this port.
- */
- public static final String UI_PORT = "ui.port";
- public static final Object UI_PORT_SCHEMA = Number.class;
-
- /**
- * HTTP UI port for log viewer
- */
- public static final String LOGVIEWER_PORT = "logviewer.port";
- public static final Object LOGVIEWER_PORT_SCHEMA = Number.class;
-
- /**
- * Childopts for log viewer java process.
- */
- public static final String LOGVIEWER_CHILDOPTS = "logviewer.childopts";
- public static final Object LOGVIEWER_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * Appender name used by log viewer to determine log directory.
- */
- public static final String LOGVIEWER_APPENDER_NAME = "logviewer.appender.name";
- public static final Object LOGVIEWER_APPENDER_NAME_SCHEMA = String.class;
-
- /**
- * Childopts for Storm UI Java process.
- */
- public static final String UI_CHILDOPTS = "ui.childopts";
- public static final Object UI_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * List of DRPC servers so that the DRPCSpout knows who to talk to.
- */
- public static final String DRPC_SERVERS = "drpc.servers";
- public static final Object DRPC_SERVERS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * This port is used by Storm DRPC for receiving HTTP DRPC requests from clients.
- */
- public static final String DRPC_HTTP_PORT = "drpc.http.port";
- public static final Object DRPC_HTTP_PORT_SCHEMA = Number.class;
-
- /**
- * This port is used by Storm DRPC for receiving HTTPS (SSL) DRPC requests from clients.
- */
- public static final String DRPC_HTTPS_PORT = "drpc.https.port";
- public static final Object DRPC_HTTPS_PORT_SCHEMA = Number.class;
-
- /**
- * Path to the keystore used by Storm DRPC for setting up HTTPS (SSL).
- */
- public static final String DRPC_HTTPS_KEYSTORE_PATH = "drpc.https.keystore.path";
- public static final Object DRPC_HTTPS_KEYSTORE_PATH_SCHEMA = String.class;
-
- /**
- * Password to the keystore used by Storm DRPC for setting up HTTPS (SSL).
- */
- public static final String DRPC_HTTPS_KEYSTORE_PASSWORD = "drpc.https.keystore.password";
- public static final Object DRPC_HTTPS_KEYSTORE_PASSWORD_SCHEMA = String.class;
-
- /**
- * Type of keystore used by Storm DRPC for setting up HTTPS (SSL).
- * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
- */
- public static final String DRPC_HTTPS_KEYSTORE_TYPE = "drpc.https.keystore.type";
- public static final Object DRPC_HTTPS_KEYSTORE_TYPE_SCHEMA = String.class;
-
- /**
- * The DRPC transport plug-in for Thrift client/server communication
- */
- public static final String DRPC_THRIFT_TRANSPORT_PLUGIN = "drpc.thrift.transport";
- public static final Object DRPC_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
- /**
- * This port is used by Storm DRPC for receiving DRPC requests from clients.
- */
- public static final String DRPC_PORT = "drpc.port";
- public static final Object DRPC_PORT_SCHEMA = Number.class;
-
- /**
- * Class name for authorization plugin for DRPC client
- */
- public static final String DRPC_AUTHORIZER = "drpc.authorizer";
- public static final Object DRPC_AUTHORIZER_SCHEMA = String.class;
-
- /**
- * The Access Control List for the DRPC Authorizer.
- * @see DRPCSimpleAclAuthorizer
- */
- public static final String DRPC_AUTHORIZER_ACL = "drpc.authorizer.acl";
- public static final Object DRPC_AUTHORIZER_ACL_SCHEMA = Map.class;
-
- /**
- * File name of the DRPC Authorizer ACL.
- * @see DRPCSimpleAclAuthorizer
- */
- public static final String DRPC_AUTHORIZER_ACL_FILENAME = "drpc.authorizer.acl.filename";
- public static final Object DRPC_AUTHORIZER_ACL_FILENAME_SCHEMA = String.class;
-
- /**
- * Whether the DRPCSimpleAclAuthorizer should deny requests for operations
- * involving functions that have no explicit ACL entry. When set to false
- * (the default) DRPC functions that have no entry in the ACL will be
- * permitted, which is appropriate for a development environment. When set
- * to true, explicit ACL entries are required for every DRPC function, and
- * any request for functions will be denied.
- * @see DRPCSimpleAclAuthorizer
- */
- public static final String DRPC_AUTHORIZER_ACL_STRICT = "drpc.authorizer.acl.strict";
- public static final Object DRPC_AUTHORIZER_ACL_STRICT_SCHEMA = Boolean.class;
-
- /**
- * DRPC thrift server worker threads
- */
- public static final String DRPC_WORKER_THREADS = "drpc.worker.threads";
- public static final Object DRPC_WORKER_THREADS_SCHEMA = Number.class;
-
- /**
- * The maximum buffer size thrift should use when reading messages for DRPC.
- */
- public static final String DRPC_MAX_BUFFER_SIZE = "drpc.max_buffer_size";
- public static final Object DRPC_MAX_BUFFER_SIZE_SCHEMA = Number.class;
-
- /**
- * DRPC thrift server queue size
- */
- public static final String DRPC_QUEUE_SIZE = "drpc.queue.size";
- public static final Object DRPC_QUEUE_SIZE_SCHEMA = Number.class;
-
- /**
- * The DRPC invocations transport plug-in for Thrift client/server communication
- */
- public static final String DRPC_INVOCATIONS_THRIFT_TRANSPORT_PLUGIN = "drpc.invocations.thrift.transport";
- public static final Object DRPC_INVOCATIONS_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
-
- /**
- * This port on Storm DRPC is used by DRPC topologies to receive function
- * invocations and send results back.
- */
- public static final String DRPC_INVOCATIONS_PORT = "drpc.invocations.port";
- public static final Object DRPC_INVOCATIONS_PORT_SCHEMA = Number.class;
-
- /**
- * DRPC invocations thrift server worker threads
- */
- public static final String DRPC_INVOCATIONS_THREADS = "drpc.invocations.threads";
- public static final Object DRPC_INVOCATIONS_THREADS_SCHEMA = Number.class;
-
- /**
- * The timeout on DRPC requests within the DRPC server. Defaults to 10
- * minutes. Note that requests can also timeout based on the socket timeout
- * on the DRPC client, and separately based on the topology message timeout
- * for the topology implementing the DRPC function.
- */
- public static final String DRPC_REQUEST_TIMEOUT_SECS = "drpc.request.timeout.secs";
- public static final Object DRPC_REQUEST_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * Childopts for Storm DRPC Java process.
- */
- public static final String DRPC_CHILDOPTS = "drpc.childopts";
- public static final Object DRPC_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * Class name of the HTTP credentials plugin for the UI.
- */
- public static final String UI_HTTP_CREDS_PLUGIN = "ui.http.creds.plugin";
- public static final Object UI_HTTP_CREDS_PLUGIN_SCHEMA = String.class;
-
- /**
- * Class name of the HTTP credentials plugin for DRPC.
- */
- public static final String DRPC_HTTP_CREDS_PLUGIN = "drpc.http.creds.plugin";
- public static final Object DRPC_HTTP_CREDS_PLUGIN_SCHEMA = String.class;
- /**
- * the metadata configured on the supervisor
- */
- public static final String SUPERVISOR_SCHEDULER_META = "supervisor.scheduler.meta";
- public static final Object SUPERVISOR_SCHEDULER_META_SCHEMA = Map.class;
-
- /**
- * A list of ports that can run workers on this supervisor. Each worker uses
- * one port, and the supervisor will only run one worker per port. Use this
- * configuration to tune how many workers run on each machine.
- */
- public static final String SUPERVISOR_SLOTS_PORTS = "supervisor.slots.ports";
- public static final Object SUPERVISOR_SLOTS_PORTS_SCHEMA = ConfigValidation.NumbersValidator;
-
- /**
- * A number representing the maximum number of workers any single topology can acquire.
- */
- public static final String NIMBUS_SLOTS_PER_TOPOLOGY = "nimbus.slots.perTopology";
- public static final Object NIMBUS_SLOTS_PER_TOPOLOGY_SCHEMA = Number.class;
-
- /**
- * A class implementing javax.servlet.Filter for DRPC HTTP requests
- */
- public static final String DRPC_HTTP_FILTER = "drpc.http.filter";
- public static final Object DRPC_HTTP_FILTER_SCHEMA = String.class;
-
- /**
- * Initialization parameters for the javax.servlet.Filter of the DRPC HTTP
- * service
- */
- public static final String DRPC_HTTP_FILTER_PARAMS = "drpc.http.filter.params";
- public static final Object DRPC_HTTP_FILTER_PARAMS_SCHEMA = Map.class;
-
- /**
- * A number representing the maximum number of executors any single topology can acquire.
- */
- public static final String NIMBUS_EXECUTORS_PER_TOPOLOGY = "nimbus.executors.perTopology";
- public static final Object NIMBUS_EXECUTORS_PER_TOPOLOGY_SCHEMA = Number.class;
- /**
- * This parameter is used by the storm-deploy project to configure the
- * jvm options for the supervisor daemon.
- */
- public static final String SUPERVISOR_CHILDOPTS = "supervisor.childopts";
- public static final Object SUPERVISOR_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * How long a worker can go without heartbeating before the supervisor tries
- * to restart the worker process.
- */
- public static final String SUPERVISOR_WORKER_TIMEOUT_SECS = "supervisor.worker.timeout.secs";
- public static final Object SUPERVISOR_WORKER_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * How long a worker can go without heartbeating during the initial launch
- * before the supervisor tries to restart the worker process. This value
- * overrides supervisor.worker.timeout.secs during launch because there is
- * additional overhead to starting and configuring the JVM on launch.
- */
- public static final String SUPERVISOR_WORKER_START_TIMEOUT_SECS = "supervisor.worker.start.timeout.secs";
- public static final Object SUPERVISOR_WORKER_START_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * Whether or not the supervisor should launch workers assigned to it.
- * Defaults to true -- and you should probably never change this value. This
- * configuration is used in the Storm unit tests.
- */
- public static final String SUPERVISOR_ENABLE = "supervisor.enable";
- public static final Object SUPERVISOR_ENABLE_SCHEMA = Boolean.class;
-
- /**
- * How often the supervisor sends a heartbeat to the master.
- */
- public static final String SUPERVISOR_HEARTBEAT_FREQUENCY_SECS = "supervisor.heartbeat.frequency.secs";
- public static final Object SUPERVISOR_HEARTBEAT_FREQUENCY_SECS_SCHEMA = Number.class;
-
- /**
- * How often the supervisor checks the worker heartbeats to see if any of
- * them need to be restarted.
- */
- public static final String SUPERVISOR_MONITOR_FREQUENCY_SECS = "supervisor.monitor.frequency.secs";
- public static final Object SUPERVISOR_MONITOR_FREQUENCY_SECS_SCHEMA = Number.class;
-
- /**
- * Should the supervisor try to run the worker as the launching user or not. Defaults to false.
- */
- public static final String SUPERVISOR_RUN_WORKER_AS_USER = "supervisor.run.worker.as.user";
- public static final Object SUPERVISOR_RUN_WORKER_AS_USER_SCHEMA = Boolean.class;
-
- /**
- * Full path to the worker-launcher executable that will be used to launch workers when
- * SUPERVISOR_RUN_WORKER_AS_USER is set to true.
- */
- public static final String SUPERVISOR_WORKER_LAUNCHER = "supervisor.worker.launcher";
- public static final Object SUPERVISOR_WORKER_LAUNCHER_SCHEMA = String.class;
-
- /**
- * The jvm opts provided to workers launched by this supervisor. All "%ID%"
- * substrings are replaced with an identifier for this worker.
- */
- public static final String WORKER_CHILDOPTS = "worker.childopts";
- public static final Object WORKER_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * Controls how many receiver threads run in each worker.
- */
- public static final String WORKER_RECEIVER_THREAD_COUNT = "topology.worker.receiver.thread.count";
- public static final Object WORKER_RECEIVER_THREAD_COUNT_SCHEMA = Number.class;
-
- /**
- * How often this worker should heartbeat to the supervisor.
- */
- public static final String WORKER_HEARTBEAT_FREQUENCY_SECS = "worker.heartbeat.frequency.secs";
- public static final Object WORKER_HEARTBEAT_FREQUENCY_SECS_SCHEMA = Number.class;
-
- /**
- * How often a task should heartbeat its status to the master.
- */
- public static final String TASK_HEARTBEAT_FREQUENCY_SECS = "task.heartbeat.frequency.secs";
- public static final Object TASK_HEARTBEAT_FREQUENCY_SECS_SCHEMA = Number.class;
-
- /**
- * How often a task should sync its connections with other tasks (if a task
- * is reassigned, the other tasks sending messages to it need to refresh
- * their connections). In general though, when a reassignment happens other
- * tasks will be notified almost immediately. This configuration is here
- * just in case that notification doesn't come through.
- */
- public static final String TASK_REFRESH_POLL_SECS = "task.refresh.poll.secs";
- public static final Object TASK_REFRESH_POLL_SECS_SCHEMA = Number.class;
-
-
- /**
- * A list of users that are allowed to interact with the topology. To use this set
- * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
- */
- public static final String TOPOLOGY_USERS = "topology.users";
- public static final Object TOPOLOGY_USERS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * A list of groups that are allowed to interact with the topology. To use this set
- * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
- */
- public static final String TOPOLOGY_GROUPS = "topology.groups";
- public static final Object TOPOLOGY_GROUPS_SCHEMA = ConfigValidation.StringsValidator;
- /**
- * True if Storm should timeout messages or not. Defaults to true. This is
- * meant to be used in unit tests to prevent tuples from being accidentally
- * timed out during the test.
- */
- public static final String TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS = "topology.enable.message.timeouts";
- public static final Object TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS_SCHEMA = Boolean.class;
-
- /**
- * When set to true, Storm will log every message that's emitted.
- */
- public static final String TOPOLOGY_DEBUG = "topology.debug";
- public static final Object TOPOLOGY_DEBUG_SCHEMA = Boolean.class;
-
- /**
- * Whether or not the master should optimize topologies by running multiple
- * tasks in a single thread where appropriate.
- */
- public static final String TOPOLOGY_OPTIMIZE = "topology.optimize";
- public static final Object TOPOLOGY_OPTIMIZE_SCHEMA = Boolean.class;
-
- /**
- * How many processes should be spawned around the cluster to execute this
- * topology. Each process will execute some number of tasks as threads
- * within them. This parameter should be used in conjunction with the
- * parallelism hints on each component in the topology to tune the
- * performance of a topology.
- */
- public static final String TOPOLOGY_WORKERS = "topology.workers";
- public static final Object TOPOLOGY_WORKERS_SCHEMA = Number.class;
-
- /**
- * How many instances to create for a spout/bolt. A task runs on a thread
- * with zero or more other tasks for the same spout/bolt. The number of
- * tasks for a spout/bolt is always the same throughout the lifetime of a
- * topology, but the number of executors (threads) for a spout/bolt can
- * change over time. This allows a topology to scale to more or less
- * resources without redeploying the topology or violating the constraints
- * of Storm (such as a fields grouping guaranteeing that the same value goes
- * to the same task).
- */
- public static final String TOPOLOGY_TASKS = "topology.tasks";
- public static final Object TOPOLOGY_TASKS_SCHEMA = Number.class;
-
- /**
- * How many executors to spawn for ackers.
- *
- * <p>
- * If this is set to 0, then Storm will immediately ack tuples as soon as
- * they come off the spout, effectively disabling reliability.
- * </p>
- */
- public static final String TOPOLOGY_ACKER_EXECUTORS = "topology.acker.executors";
- public static final Object TOPOLOGY_ACKER_EXECUTORS_SCHEMA = Number.class;
-
- /**
- * TOPOLOGY_ACKERS is kept for compatibility with old Storm code.
- */
- public static final String TOPOLOGY_ACKERS = TOPOLOGY_ACKER_EXECUTORS;
- public static final Object TOPOLOGY_ACKER_SCHEMA = Number.class;
-
- /**
- * The maximum amount of time given to the topology to fully process a
- * message emitted by a spout. If the message is not acked within this time
- * frame, Storm will fail the message on the spout. Some spout
- * implementations will then replay the message at a later time.
- */
- public static final String TOPOLOGY_MESSAGE_TIMEOUT_SECS = "topology.message.timeout.secs";
- public static final Object TOPOLOGY_MESSAGE_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * A list of serialization registrations for Kryo (
- * http://code.google.com/p/kryo/ ), the underlying serialization framework
- * for Storm. A serialization can either be the name of a class (in which
- * case Kryo will automatically create a serializer for the class that saves
- * all the object's fields), or an implementation of
- * com.esotericsoftware.kryo.Serializer.
- *
- * See Kryo's documentation for more information about writing custom
- * serializers.
- */
- public static final String TOPOLOGY_KRYO_REGISTER = "topology.kryo.register";
- public static final Object TOPOLOGY_KRYO_REGISTER_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * A list of classes that customize storm's kryo instance during start-up.
- * Each listed class name must implement IKryoDecorator. During start-up the
- * listed class is instantiated with 0 arguments, then its 'decorate' method
- * is called with storm's kryo instance as the only argument.
- */
- public static final String TOPOLOGY_KRYO_DECORATORS = "topology.kryo.decorators";
- public static final Object TOPOLOGY_KRYO_DECORATORS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * Class that specifies how to create a Kryo instance for serialization.
- * Storm will then apply topology.kryo.register and topology.kryo.decorators
- * on top of this. The default implementation implements
- * topology.fall.back.on.java.serialization and turns references off.
- */
- public static final String TOPOLOGY_KRYO_FACTORY = "topology.kryo.factory";
- public static final Object TOPOLOGY_KRYO_FACTORY_SCHEMA = String.class;
-
- /**
- * Whether or not Storm should skip the loading of kryo registrations for
- * which it does not know the class or have the serializer implementation.
- * Otherwise, the task will fail to load and will throw an error at runtime.
- * The use case of this is if you want to declare your serializations on the
- * storm.yaml files on the cluster rather than every single time you submit
- * a topology. Different applications may use different serializations and
- * so a single application may not have the code for the other serializers
- * used by other apps. By setting this config to true, Storm will ignore
- * that it doesn't have those other serializations rather than throw an
- * error.
- */
- public static final String TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS = "topology.skip.missing.kryo.registrations";
- public static final Object TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS_SCHEMA = Boolean.class;
-
- /**
- * A list of classes implementing IMetricsConsumer (See storm.yaml.example
- * for exact config format). Each listed class will be routed all the
- * metrics data generated by the storm metrics API. Each listed class maps
- * 1:1 to a system bolt named __metrics_ClassName#N, and its parallelism is
- * configurable.
- */
- public static final String TOPOLOGY_METRICS_CONSUMER_REGISTER = "topology.metrics.consumer.register";
- public static final Object TOPOLOGY_METRICS_CONSUMER_REGISTER_SCHEMA = ConfigValidation.MapsValidator;
-
- /**
- * The maximum parallelism allowed for a component in this topology. This
- * configuration is typically used in testing to limit the number of threads
- * spawned in local mode.
- */
- public static final String TOPOLOGY_MAX_TASK_PARALLELISM = "topology.max.task.parallelism";
- public static final Object TOPOLOGY_MAX_TASK_PARALLELISM_SCHEMA = Number.class;
-
- /**
- * The maximum number of tuples that can be pending on a spout task at any
- * given time. This config applies to individual tasks, not to spouts or
- * topologies as a whole.
- *
- * A pending tuple is one that has been emitted from a spout but has not
- * been acked or failed yet. Note that this config parameter has no effect
- * for unreliable spouts that don't tag their tuples with a message id.
- */
- public static final String TOPOLOGY_MAX_SPOUT_PENDING = "topology.max.spout.pending";
- public static final Object TOPOLOGY_MAX_SPOUT_PENDING_SCHEMA = Number.class;
-
- /**
- * A class that implements a strategy for what to do when a spout needs to
- * wait. Waiting is triggered in one of two conditions:
- *
- * 1. nextTuple emits no tuples
- * 2. The spout has hit maxSpoutPending and can't emit any more tuples
- */
- public static final String TOPOLOGY_SPOUT_WAIT_STRATEGY = "topology.spout.wait.strategy";
- public static final Object TOPOLOGY_SPOUT_WAIT_STRATEGY_SCHEMA = String.class;
-
- /**
- * The number of milliseconds the SleepEmptyEmitStrategy should sleep for.
- */
- public static final String TOPOLOGY_SLEEP_SPOUT_WAIT_STRATEGY_TIME_MS = "topology.sleep.spout.wait.strategy.time.ms";
- public static final Object TOPOLOGY_SLEEP_SPOUT_WAIT_STRATEGY_TIME_MS_SCHEMA = Number.class;
-
- /**
- * The maximum amount of time a component gives a source of state to
- * synchronize before it requests synchronization again.
- */
- public static final String TOPOLOGY_STATE_SYNCHRONIZATION_TIMEOUT_SECS = "topology.state.synchronization.timeout.secs";
- public static final Object TOPOLOGY_STATE_SYNCHRONIZATION_TIMEOUT_SECS_SCHEMA = Number.class;
-
- /**
- * The percentage of tuples to sample to produce stats for a task.
- */
- public static final String TOPOLOGY_STATS_SAMPLE_RATE = "topology.stats.sample.rate";
- public static final Object TOPOLOGY_STATS_SAMPLE_RATE_SCHEMA = Number.class;
-
- /**
- * The time period that built-in metrics data is bucketed into.
- */
- public static final String TOPOLOGY_BUILTIN_METRICS_BUCKET_SIZE_SECS = "topology.builtin.metrics.bucket.size.secs";
- public static final Object TOPOLOGY_BUILTIN_METRICS_BUCKET_SIZE_SECS_SCHEMA = Number.class;
-
- /**
- * Whether or not to use Java serialization in a topology.
- */
- public static final String TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION = "topology.fall.back.on.java.serialization";
- public static final Object TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION_SCHEMA = Boolean.class;
-
- /**
- * Topology-specific options for the worker child process. This is used in
- * addition to WORKER_CHILDOPTS.
- */
- public static final String TOPOLOGY_WORKER_CHILDOPTS = "topology.worker.childopts";
- public static final Object TOPOLOGY_WORKER_CHILDOPTS_SCHEMA = String.class;
-
- /**
- * Topology-specific environment variables for the worker child process.
- * This is added to the existing environment (that of the supervisor)
- */
- public static final String TOPOLOGY_ENVIRONMENT="topology.environment";
- public static final Object TOPOLOGY_ENVIRONMENT_SCHEMA = Map.class;
-
- /**
- * Topology-specific option to enable/disable a bolt's outgoing overflow buffer.
- * Enabling this option ensures that a bolt can always drain its incoming
- * messages, preventing live-lock for topologies with cyclic flow.
- * Note that the overflow buffer can fill up, gradually degrading performance
- * and eventually running the worker out of memory.
- */
- public static final String TOPOLOGY_BOLTS_OUTGOING_OVERFLOW_BUFFER_ENABLE="topology.bolts.outgoing.overflow.buffer.enable";
- public static final Object TOPOLOGY_BOLTS_OUTGOING_OVERFLOW_BUFFER_ENABLE_SCHEMA = Boolean.class;
-
- /**
- * This config is available for TransactionalSpouts, and contains the id (a
- * String) for the transactional topology. This id is used to store the
- * state of the transactional topology in Zookeeper.
- */
- public static final String TOPOLOGY_TRANSACTIONAL_ID = "topology.transactional.id";
- public static final Object TOPOLOGY_TRANSACTIONAL_ID_SCHEMA = String.class;
-
- /**
- * A list of task hooks that are automatically added to every spout and bolt
- * in the topology. An example of when you'd do this is to add a hook that
- * integrates with your internal monitoring system. These hooks are
- * instantiated using the zero-arg constructor.
- */
- public static final String TOPOLOGY_AUTO_TASK_HOOKS = "topology.auto.task.hooks";
- public static final Object TOPOLOGY_AUTO_TASK_HOOKS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * The size of the Disruptor receive queue for each executor. Must be a
- * power of 2.
- */
- public static final String TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE = "topology.executor.receive.buffer.size";
- public static final Object TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE_SCHEMA = ConfigValidation.PowerOf2Validator;
-
- /**
- * The maximum number of messages to batch from the thread receiving off the
- * network to the executor queues. Must be a power of 2.
- */
- public static final String TOPOLOGY_RECEIVER_BUFFER_SIZE = "topology.receiver.buffer.size";
- public static final Object TOPOLOGY_RECEIVER_BUFFER_SIZE_SCHEMA = ConfigValidation.PowerOf2Validator;
-
- /**
- * The size of the Disruptor send queue for each executor. Must be a power
- * of 2.
- */
- public static final String TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE = "topology.executor.send.buffer.size";
- public static final Object TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE_SCHEMA = ConfigValidation.PowerOf2Validator;
-
- /**
- * The size of the Disruptor transfer queue for each worker.
- */
- public static final String TOPOLOGY_TRANSFER_BUFFER_SIZE = "topology.transfer.buffer.size";
- public static final Object TOPOLOGY_TRANSFER_BUFFER_SIZE_SCHEMA = Number.class;
-
- /**
- * How often a tick tuple from the "__system" component and "__tick" stream
- * should be sent to tasks. Meant to be used as a component-specific
- * configuration.
- */
- public static final String TOPOLOGY_TICK_TUPLE_FREQ_SECS = "topology.tick.tuple.freq.secs";
- public static final Object TOPOLOGY_TICK_TUPLE_FREQ_SECS_SCHEMA = Number.class;
-
- /**
- * Configure the wait strategy used for internal queuing. Can be used to
- * tradeoff latency vs. throughput
- */
- public static final String TOPOLOGY_DISRUPTOR_WAIT_STRATEGY = "topology.disruptor.wait.strategy";
- public static final Object TOPOLOGY_DISRUPTOR_WAIT_STRATEGY_SCHEMA = String.class;
-
- /**
- * The size of the shared thread pool for worker tasks to make use of. The
- * thread pool can be accessed via the TopologyContext.
- */
- public static final String TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE = "topology.worker.shared.thread.pool.size";
- public static final Object TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE_SCHEMA = Number.class;
-
- /**
- * The interval in seconds to use for determining whether to throttle errors
- * reported to Zookeeper. For example, an interval of 10 seconds with
- * topology.max.error.report.per.interval set to 5 will only allow 5 errors
- * to be reported to Zookeeper per task for every 10 second interval of
- * time.
- */
- public static final String TOPOLOGY_ERROR_THROTTLE_INTERVAL_SECS = "topology.error.throttle.interval.secs";
- public static final Object TOPOLOGY_ERROR_THROTTLE_INTERVAL_SECS_SCHEMA = Number.class;
-
- /**
- * See doc for TOPOLOGY_ERROR_THROTTLE_INTERVAL_SECS
- */
- public static final String TOPOLOGY_MAX_ERROR_REPORT_PER_INTERVAL = "topology.max.error.report.per.interval";
- public static final Object TOPOLOGY_MAX_ERROR_REPORT_PER_INTERVAL_SCHEMA = Number.class;
-
- /**
- * How often a batch can be emitted in a Trident topology.
- */
- public static final String TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS = "topology.trident.batch.emit.interval.millis";
- public static final Object TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS_SCHEMA = Number.class;
-
- /**
- * Name of the topology. This config is automatically set by Storm when the
- * topology is submitted.
- */
- public static final String TOPOLOGY_NAME = "topology.name";
- public static final Object TOPOLOGY_NAME_SCHEMA = String.class;
-
- /**
- * The principal who submitted a topology
- */
- public final static String TOPOLOGY_SUBMITTER_PRINCIPAL = "topology.submitter.principal";
- public static final Object TOPOLOGY_SUBMITTER_PRINCIPAL_SCHEMA = String.class;
-
- /**
- * The local user name of the user who submitted a topology.
- */
- public static final String TOPOLOGY_SUBMITTER_USER = "topology.submitter.user";
- public static final Object TOPOLOGY_SUBMITTER_USER_SCHEMA = String.class;
-
- /**
- * Array of components that the scheduler should try to place on separate hosts.
- */
- public static final String TOPOLOGY_SPREAD_COMPONENTS = "topology.spread.components";
- public static final Object TOPOLOGY_SPREAD_COMPONENTS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * A list of IAutoCredentials that the topology should load and use.
- */
- public static final String TOPOLOGY_AUTO_CREDENTIALS = "topology.auto-credentials";
- public static final Object TOPOLOGY_AUTO_CREDENTIALS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * Max pending tuples in one ShellBolt
- */
- public static final String TOPOLOGY_SHELLBOLT_MAX_PENDING="topology.shellbolt.max.pending";
- public static final Object TOPOLOGY_SHELLBOLT_MAX_PENDING_SCHEMA = Number.class;
-
- /**
- * The root directory in ZooKeeper for metadata about TransactionalSpouts.
- */
- public static final String TRANSACTIONAL_ZOOKEEPER_ROOT = "transactional.zookeeper.root";
- public static final Object TRANSACTIONAL_ZOOKEEPER_ROOT_SCHEMA = String.class;
-
- /**
- * The list of zookeeper servers in which to keep the transactional state.
- * If null (the default), storm.zookeeper.servers will be used.
- */
- public static final String TRANSACTIONAL_ZOOKEEPER_SERVERS = "transactional.zookeeper.servers";
- public static final Object TRANSACTIONAL_ZOOKEEPER_SERVERS_SCHEMA = ConfigValidation.StringsValidator;
-
- /**
- * The port to use to connect to the transactional zookeeper servers. If
- * null (the default), storm.zookeeper.port will be used.
- */
- public static final String TRANSACTIONAL_ZOOKEEPER_PORT = "transactional.zookeeper.port";
- public static final Object TRANSACTIONAL_ZOOKEEPER_PORT_SCHEMA = Number.class;
-
- /**
- * The number of threads that should be used by the zeromq context in each
- * worker process.
- */
- public static final String ZMQ_THREADS = "zmq.threads";
- public static final Object ZMQ_THREADS_SCHEMA = Number.class;
-
- /**
- * How long a connection should retry sending messages to a target host when
- * the connection is closed. This is an advanced configuration and can
- * almost certainly be ignored.
- */
- public static final String ZMQ_LINGER_MILLIS = "zmq.linger.millis";
- public static final Object ZMQ_LINGER_MILLIS_SCHEMA = Number.class;
-
- /**
- * The high water mark for the ZeroMQ push sockets used for networking. Use this
- * config to prevent buffer explosion on the networking layer.
- */
- public static final String ZMQ_HWM = "zmq.hwm";
- public static final Object ZMQ_HWM_SCHEMA = Number.class;
-
- /**
- * This value is passed to spawned JVMs (e.g., Nimbus, Supervisor, and
- * Workers) for the java.library.path value. java.library.path tells the JVM
- * where to look for native libraries. It is necessary to set this config
- * correctly since Storm uses the ZeroMQ and JZMQ native libs.
- */
- public static final String JAVA_LIBRARY_PATH = "java.library.path";
- public static final Object JAVA_LIBRARY_PATH_SCHEMA = String.class;
-
- /**
- * The path to use as the zookeeper dir when running a zookeeper server via
- * "storm dev-zookeeper". This zookeeper instance is only intended for
- * development; it is not a production grade zookeeper setup.
- */
- public static final String DEV_ZOOKEEPER_PATH = "dev.zookeeper.path";
- public static final Object DEV_ZOOKEEPER_PATH_SCHEMA = String.class;
-
- /**
- * A map from topology name to the number of machines that should be
- * dedicated for that topology. Set storm.scheduler to
- * backtype.storm.scheduler.IsolationScheduler to make use of the isolation
- * scheduler.
- */
- public static final String ISOLATION_SCHEDULER_MACHINES = "isolation.scheduler.machines";
- public static final Object ISOLATION_SCHEDULER_MACHINES_SCHEMA = List.class;
-
- /**
- * A map from user name to the number of machines that user is allowed to use. Set storm.scheduler
- * to backtype.storm.scheduler.multitenant.MultitenantScheduler
- */
- public static final String MULTITENANT_SCHEDULER_USER_POOLS = "multitenant.scheduler.user.pools";
- public static final Object MULTITENANT_SCHEDULER_USER_POOLS_SCHEMA = Map.class;
-
- /**
- * The number of machines that should be used by this topology to isolate it from all others. Set storm.scheduler
- * to backtype.storm.scheduler.multitenant.MultitenantScheduler
- */
- public static final String TOPOLOGY_ISOLATED_MACHINES = "topology.isolate.machines";
- public static final Object TOPOLOGY_ISOLATED_MACHINES_SCHEMA = Number.class;
-
- public static final String WORKER_CLASSPATH = "worker.classpath";
- public static final Object WORKER_CLASSPATH_SCHEMA = String.class;
-
- /**
- * The serializer for communication between shell components and non-JVM
- * processes
- */
- public static final String TOPOLOGY_MULTILANG_SERIALIZER = "topology.multilang.serializer";
- public static final Object TOPOLOGY_MULTILANG_SERIALIZER_SCHEMA = String.class;
-
- /**
- * The Kerberos principal name (with realm) of the topology submitter, used to
- * obtain and renew HDFS delegation tokens on the submitter's behalf.
- * See {@link backtype.storm.security.auth.hadoop.AutoHDFS}.
- */
- public static final Object TOPOLOGY_HDFS_PRINCIPAL = "topology.hdfs.user";
- public static final Object TOPOLOGY_HDFS_PRINCIPAL_SCHEMA = String.class;
-
- /**
- * The HDFS URI used by AutoHDFS to obtain the delegation token on the topology
- * submitter's behalf from nimbus. If this is not provided, the default URI from
- * the HDFS configuration files will be used.
- */
- public static final Object TOPOLOGY_HDFS_URI = "topology.hdfs.uri";
- public static final Object TOPOLOGY_HDFS_URI_SCHEMA = String.class;
-
- public static void setDebug(Map conf, boolean isOn) {
- conf.put(Config.TOPOLOGY_DEBUG, isOn);
- }
-
- public void setDebug(boolean isOn) {
- setDebug(this, isOn);
- }
-
- @Deprecated
- public void setOptimize(boolean isOn) {
- put(Config.TOPOLOGY_OPTIMIZE, isOn);
- }
-
- public static void setNumWorkers(Map conf, int workers) {
- conf.put(Config.TOPOLOGY_WORKERS, workers);
- }
-
- public void setNumWorkers(int workers) {
- setNumWorkers(this, workers);
- }
-
- public static void setNumAckers(Map conf, int numExecutors) {
- conf.put(Config.TOPOLOGY_ACKER_EXECUTORS, numExecutors);
- }
-
- public void setNumAckers(int numExecutors) {
- setNumAckers(this, numExecutors);
- }
-
- public static void setMessageTimeoutSecs(Map conf, int secs) {
- conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, secs);
- }
-
- public void setMessageTimeoutSecs(int secs) {
- setMessageTimeoutSecs(this, secs);
- }
-
- public static void registerSerialization(Map conf, Class klass) {
- getRegisteredSerializations(conf).add(klass.getName());
- }
-
- public void registerSerialization(Class klass) {
- registerSerialization(this, klass);
- }
-
- public static void registerSerialization(Map conf, Class klass,
- Class<? extends Serializer> serializerClass) {
- Map<String, String> register = new HashMap<String, String>();
- register.put(klass.getName(), serializerClass.getName());
- getRegisteredSerializations(conf).add(register);
- }
-
- public void registerSerialization(Class klass,
- Class<? extends Serializer> serializerClass) {
- registerSerialization(this, klass, serializerClass);
- }
-
- public static void registerMetricsConsumer(Map conf, Class klass, Object argument, long parallelismHint) {
- HashMap m = new HashMap();
- m.put("class", klass.getCanonicalName());
- m.put("parallelism.hint", parallelismHint);
- m.put("argument", argument);
-
- List l = (List)conf.get(TOPOLOGY_METRICS_CONSUMER_REGISTER);
- if (l == null) { l = new ArrayList(); }
- l.add(m);
- conf.put(TOPOLOGY_METRICS_CONSUMER_REGISTER, l);
- }
-
- public void registerMetricsConsumer(Class klass, Object argument, long parallelismHint) {
- registerMetricsConsumer(this, klass, argument, parallelismHint);
- }
-
- public static void registerMetricsConsumer(Map conf, Class klass, long parallelismHint) {
- registerMetricsConsumer(conf, klass, null, parallelismHint);
- }
-
- public void registerMetricsConsumer(Class klass, long parallelismHint) {
- registerMetricsConsumer(this, klass, parallelismHint);
- }
-
- public static void registerMetricsConsumer(Map conf, Class klass) {
- registerMetricsConsumer(conf, klass, null, 1L);
- }
-
- public void registerMetricsConsumer(Class klass) {
- registerMetricsConsumer(this, klass);
- }
-
- public static void registerDecorator(Map conf,
- Class<? extends IKryoDecorator> klass) {
- getRegisteredDecorators(conf).add(klass.getName());
- }
-
- public void registerDecorator(Class<? extends IKryoDecorator> klass) {
- registerDecorator(this, klass);
- }
-
- public static void setKryoFactory(Map conf,
- Class<? extends IKryoFactory> klass) {
- conf.put(Config.TOPOLOGY_KRYO_FACTORY, klass.getName());
- }
-
- public void setKryoFactory(Class<? extends IKryoFactory> klass) {
- setKryoFactory(this, klass);
- }
-
- public static void setSkipMissingKryoRegistrations(Map conf, boolean skip) {
- conf.put(Config.TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS, skip);
- }
-
- public void setSkipMissingKryoRegistrations(boolean skip) {
- setSkipMissingKryoRegistrations(this, skip);
- }
-
- public static void setMaxTaskParallelism(Map conf, int max) {
- conf.put(Config.TOPOLOGY_MAX_TASK_PARALLELISM, max);
- }
-
- public void setMaxTaskParallelism(int max) {
- setMaxTaskParallelism(this, max);
- }
-
- public static void setMaxSpoutPending(Map conf, int max) {
- conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING, max);
- }
-
- public void setMaxSpoutPending(int max) {
- setMaxSpoutPending(this, max);
- }
-
- public static void setStatsSampleRate(Map conf, double rate) {
- conf.put(Config.TOPOLOGY_STATS_SAMPLE_RATE, rate);
- }
-
- public void setStatsSampleRate(double rate) {
- setStatsSampleRate(this, rate);
- }
-
- public static void setFallBackOnJavaSerialization(Map conf, boolean fallback) {
- conf.put(Config.TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION, fallback);
- }
-
- public void setFallBackOnJavaSerialization(boolean fallback) {
- setFallBackOnJavaSerialization(this, fallback);
- }
-
- private static List getRegisteredSerializations(Map conf) {
- List ret;
- if (!conf.containsKey(Config.TOPOLOGY_KRYO_REGISTER)) {
- ret = new ArrayList();
- } else {
- ret = new ArrayList((List) conf.get(Config.TOPOLOGY_KRYO_REGISTER));
- }
- conf.put(Config.TOPOLOGY_KRYO_REGISTER, ret);
- return ret;
- }
-
- private static List getRegisteredDecorators(Map conf) {
- List ret;
- if (!conf.containsKey(Config.TOPOLOGY_KRYO_DECORATORS)) {
- ret = new ArrayList();
- } else {
- ret = new ArrayList(
- (List) conf.get(Config.TOPOLOGY_KRYO_DECORATORS));
- }
- conf.put(Config.TOPOLOGY_KRYO_DECORATORS, ret);
- return ret;
- }
-}
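
The setter and register* helpers at the end of this Config class are the usual way topology code populates these keys before submission. A minimal sketch of typical usage follows; it assumes the standard backtype.storm.topology.TopologyBuilder and backtype.storm.StormSubmitter APIs, which are not part of this diff, and the spout/bolt wiring is omitted as hypothetical.

    import backtype.storm.Config;
    import backtype.storm.StormSubmitter;
    import backtype.storm.topology.TopologyBuilder;

    public class ExampleSubmitter {
        public static void main(String[] args) throws Exception {
            Config conf = new Config();
            conf.setNumWorkers(4);            // topology.workers
            conf.setNumAckers(2);             // topology.acker.executors
            conf.setMaxSpoutPending(1000);    // topology.max.spout.pending
            conf.setMessageTimeoutSecs(30);   // topology.message.timeout.secs
            // conf.registerSerialization(MyEvent.class);  // hypothetical user class, topology.kryo.register

            TopologyBuilder builder = new TopologyBuilder();
            // builder.setSpout(...) and builder.setBolt(...) calls would go here.

            StormSubmitter.submitTopology("example-topology", conf, builder.createTopology());
        }
    }
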
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/ConfigValidation.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/ConfigValidation.java b/jstorm-client/src/main/java/backtype/storm/ConfigValidation.java
deleted file mode 100644
index 0825eb9..0000000
--- a/jstorm-client/src/main/java/backtype/storm/ConfigValidation.java
+++ /dev/null
@@ -1,100 +0,0 @@
-package backtype.storm;
-
-import java.util.Map;
-
-/**
- * Provides functionality for validating configuration fields.
- */
-public class ConfigValidation {
-
- /**
- * Declares methods for validating configuration values.
- */
- public static interface FieldValidator {
- /**
- * Validates the given field.
- *
- * @param name
- * the name of the field.
- * @param field
- * The field to be validated.
- * @throws IllegalArgumentException
- * if the field fails validation.
- */
- public void validateField(String name, Object field)
- throws IllegalArgumentException;
- }
-
- /**
- * Returns a new FieldValidator for a List of the given Class.
- *
- * @param cls
- * the Class of elements composing the list
- * @return a FieldValidator for a list of the given class
- */
- static FieldValidator FieldListValidatorFactory(final Class cls) {
- return new FieldValidator() {
- @Override
- public void validateField(String name, Object field)
- throws IllegalArgumentException {
- if (field == null) {
- // A null value is acceptable.
- return;
- }
- if (field instanceof Iterable) {
- for (Object e : (Iterable) field) {
- if (!cls.isInstance(e)) {
- throw new IllegalArgumentException(
- "Each element of the list " + name
- + " must be a " + cls.getName()
- + ".");
- }
- }
- return;
- }
- throw new IllegalArgumentException("Field " + name
- + " must be an Iterable of " + cls.getName());
- }
- };
- }
-
- /**
- * Validates a list of Numbers.
- */
- public static Object NumbersValidator = FieldListValidatorFactory(Number.class);
-
- /**
- * Validates a list of Strings.
- */
- public static Object StringsValidator = FieldListValidatorFactory(String.class);
-
- /**
- * Validates a list of Maps.
- */
- public static Object MapsValidator = FieldListValidatorFactory(Map.class);
-
- /**
- * Validates a power of 2.
- */
- public static Object PowerOf2Validator = new FieldValidator() {
- @Override
- public void validateField(String name, Object o)
- throws IllegalArgumentException {
- if (o == null) {
- // A null value is acceptable.
- return;
- }
- final long i;
- if (o instanceof Number
- && (i = ((Number) o).longValue()) == ((Number) o)
- .doubleValue()) {
- // Test whether the integer is a power of 2.
- if (i > 0 && (i & (i - 1)) == 0) {
- return;
- }
- }
- throw new IllegalArgumentException("Field " + name
- + " must be a power of 2.");
- }
- };
-}
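
Each *_SCHEMA field in the Config class above is either a Class or one of the FieldValidator instances defined here. A short sketch of exercising a validator from this file directly; the key name and the values passed in are illustrative only.

    import java.util.Arrays;
    import backtype.storm.ConfigValidation;
    import backtype.storm.ConfigValidation.FieldValidator;

    public class ValidatorDemo {
        public static void main(String[] args) {
            // NumbersValidator is declared as Object, so cast to the interface.
            FieldValidator ports = (FieldValidator) ConfigValidation.NumbersValidator;

            // Passes: every element is a Number.
            ports.validateField("supervisor.slots.ports", Arrays.asList(6700, 6701, 6702));

            // Throws IllegalArgumentException: "oops" is not a Number.
            ports.validateField("supervisor.slots.ports", Arrays.asList(6700, "oops"));
        }
    }
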
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/Constants.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/Constants.java b/jstorm-client/src/main/java/backtype/storm/Constants.java
deleted file mode 100644
index b657ee6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/Constants.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package backtype.storm;
-
-import backtype.storm.coordination.CoordinatedBolt;
-import clojure.lang.RT;
-
-public class Constants {
- public static final String COORDINATED_STREAM_ID = CoordinatedBolt.class
- .getName() + "/coord-stream";
-
- public static final long SYSTEM_TASK_ID = -1;
- public static final Object SYSTEM_EXECUTOR_ID = RT.readString("[-1 -1]");
- public static final String SYSTEM_COMPONENT_ID = "__system";
- public static final String SYSTEM_TICK_STREAM_ID = "__tick";
- public static final String METRICS_COMPONENT_ID_PREFIX = "__metrics";
- public static final String METRICS_STREAM_ID = "__metrics";
- public static final String METRICS_TICK_STREAM_ID = "__metrics_tick";
- public static final String CREDENTIALS_CHANGED_STREAM_ID = "__credentials";
-
- public static final String JSTORM_CONF_DIR = "JSTORM_CONF_DIR";
-}
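
A common use of these constants is detecting tick tuples in a bolt when topology.tick.tuple.freq.secs is set on a component (see the Config entry above). A minimal sketch, assuming the standard backtype.storm.tuple.Tuple API:

    import backtype.storm.Constants;
    import backtype.storm.tuple.Tuple;

    public final class TickTuples {
        private TickTuples() {}

        // True when the tuple is the periodic tick emitted by the __system
        // component on the __tick stream.
        public static boolean isTick(Tuple tuple) {
            return Constants.SYSTEM_COMPONENT_ID.equals(tuple.getSourceComponent())
                    && Constants.SYSTEM_TICK_STREAM_ID.equals(tuple.getSourceStreamId());
        }
    }
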
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/GenericOptionsParser.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/GenericOptionsParser.java b/jstorm-client/src/main/java/backtype/storm/GenericOptionsParser.java
deleted file mode 100644
index 8ca7a1d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/GenericOptionsParser.java
+++ /dev/null
@@ -1,296 +0,0 @@
-package backtype.storm;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStreamReader;
-import java.io.IOException;
-import java.io.PrintStream;
-import java.nio.charset.Charset;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.LinkedHashMap;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.yaml.snakeyaml.Yaml;
-
-/**
- * <code>GenericOptionsParser</code> is a utility to parse command line
- * arguments generic to Storm.
- *
- * <code>GenericOptionsParser</code> recognizes several standard command line
- * arguments, enabling applications to easily specify additional jar files,
- * configuration resources, data files etc.
- *
- * <h4 id="GenericOptions">Generic Options</h4>
- *
- * <p>
- * The supported generic options are:
- * </p>
- * <p>
- * <blockquote>
- *
- * <pre>
- * -conf <conf.xml> load configurations from
- * <conf.xml>
- * -conf <conf.yaml> load configurations from
- * <conf.yaml>
- * -D <key=value> set <key> in configuration to
- * <value> (preserve value's type)
- * -libjars <comma separated list of jars> specify comma separated jars to be
- * used by the submitted topology
- * </pre>
- *
- * </blockquote>
- * </p>
- *
- * <b>Note:</b> The XML configuration file specified by <code>-conf</code> shall
- * be readable by Hadoop's <a href=
- * "http://hadoop.apache.org/docs/current/api/org/apache/hadoop/conf/Configuration.html"
- * ><code>Configuration</code></a> class. Also note that all configuration
- * values of an XML file will be treated as strings, and <b>not as specific
- * types</b>.
- *
- * <p>
- * The general command line syntax is:
- * </p>
- * <p>
- * <tt><pre>
- * storm jar app.jar [genericOptions] [commandOptions]
- * </pre></tt>
- * </p>
- *
- * <p>
- * Generic command line arguments <strong>might</strong> modify
- * <code>Config</code> objects given to constructors.
- * </p>
- *
- * <h4>Configuration priority</h4>
- *
- * The following list defines the priorities of different configuration sources,
- * in ascending order. Thus, if a configuration appears in more than one of
- * them, only the last one will take effect.
- *
- * <ul>
- * <li> <code>defaults.yaml</code> in classpath.
- * <li> <code>storm.yaml</code> in classpath.
- * <li>Configurations from files specified with the <code>-conf</code> option,
- * in the order of appearance.
- * <li>Configurations defined with the <code>-D</code> option, in order of
- * appearance.
- * </ul>
- *
- * <p>
- * The functionality is implemented using Commons CLI.
- * </p>
- *
- * @see Tool
- * @see ToolRunner
- */
-
-public class GenericOptionsParser {
- static final Logger LOG = LoggerFactory
- .getLogger(GenericOptionsParser.class);
-
- static final Charset UTF8 = Charset.forName("UTF-8");
-
- public static final String TOPOLOGY_LIB_PATH = "topology.lib.path";
-
- public static final String TOPOLOGY_LIB_NAME = "topology.lib.name";
-
- Config conf;
-
- CommandLine commandLine;
-
- // Order in this map is important for these purposes:
- // - configuration priority
- static final LinkedHashMap<String, OptionProcessor> optionProcessors = new LinkedHashMap<String, OptionProcessor>();
-
- public GenericOptionsParser(Config conf, String[] args)
- throws ParseException {
- this(conf, new Options(), args);
- }
-
- public GenericOptionsParser(Config conf, Options options, String[] args)
- throws ParseException {
- this.conf = conf;
- parseGeneralOptions(options, conf, args);
- }
-
- public String[] getRemainingArgs() {
- return commandLine.getArgs();
- }
-
- public Config getConfiguration() {
- return conf;
- }
-
- static Options buildGeneralOptions(Options opts) {
- Options r = new Options();
-
- for (Object o : opts.getOptions())
- r.addOption((Option) o);
-
- Option libjars = OptionBuilder
- .withArgName("paths")
- .hasArg()
- .withDescription(
- "comma separated jars to be used by the submitted topology")
- .create("libjars");
- r.addOption(libjars);
- optionProcessors.put("libjars", new LibjarsProcessor());
-
- Option conf = OptionBuilder.withArgName("configuration file").hasArg()
- .withDescription("an application configuration file")
- .create("conf");
- r.addOption(conf);
- optionProcessors.put("conf", new ConfFileProcessor());
-
- // Must come after `conf': this option is of higher priority
- Option extraConfig = OptionBuilder.withArgName("D").hasArg()
- .withDescription("extra configurations (preserving types)")
- .create("D");
- r.addOption(extraConfig);
- optionProcessors.put("D", new ExtraConfigProcessor());
-
- return r;
- }
-
- void parseGeneralOptions(Options opts, Config conf, String[] args)
- throws ParseException {
- opts = buildGeneralOptions(opts);
- CommandLineParser parser = new GnuParser();
- commandLine = parser.parse(opts, args, true);
- processGeneralOptions(conf, commandLine);
- }
-
- void processGeneralOptions(Config conf, CommandLine commandLine)
- throws ParseException {
- for (Map.Entry<String, OptionProcessor> e : optionProcessors.entrySet())
- if (commandLine.hasOption(e.getKey()))
- e.getValue().process(conf, commandLine);
- }
-
- static List<File> validateFiles(String pathList) throws IOException {
- List<File> l = new ArrayList<File>();
-
- for (String s : pathList.split(",")) {
- File file = new File(s);
- if (!file.exists())
- throw new FileNotFoundException("File `"
- + file.getAbsolutePath() + "' does not exist");
-
- l.add(file);
- }
-
- return l;
- }
-
- public static void printGenericCommandUsage(PrintStream out) {
- String[] strs = new String[] {
- "Generic options supported are",
- " -conf <conf.xml> load configurations from",
- " <conf.xml>",
- " -conf <conf.yaml> load configurations from",
- " <conf.yaml>",
- " -D <key>=<value> set <key> in configuration",
- " to <value> (preserve value's type)",
- " -libjars <comma separated list of jars> specify comma separated",
- " jars to be used by",
- " the submitted topology", };
- for (String s : strs)
- out.println(s);
- }
-
- static interface OptionProcessor {
- public void process(Config conf, CommandLine commandLine)
- throws ParseException;
- }
-
- static class LibjarsProcessor implements OptionProcessor {
- @Override
- public void process(Config conf, CommandLine commandLine)
- throws ParseException {
- try {
- List<File> jarFiles = validateFiles(commandLine
- .getOptionValue("libjars"));
- Map<String, String> jars = new HashMap<String, String>(jarFiles.size());
- List<String> names = new ArrayList<String>(jarFiles.size());
- for (File f : jarFiles) {
- jars.put(f.getName(), f.getAbsolutePath());
- names.add(f.getName());
- }
- conf.put(TOPOLOGY_LIB_PATH, jars);
- conf.put(TOPOLOGY_LIB_NAME, names);
-
- } catch (IOException e) {
- throw new ParseException(e.getMessage());
- }
- }
- }
-
- static class ExtraConfigProcessor implements OptionProcessor {
- static final Yaml yaml = new Yaml();
-
- @Override
- public void process(Config conf, CommandLine commandLine)
- throws ParseException {
- for (String s : commandLine.getOptionValues("D")) {
- String[] keyval = s.split("=", 2);
- if (keyval.length != 2)
- throw new ParseException("Invalid option value `" + s + "'");
-
- conf.putAll((Map) yaml.load(keyval[0] + ": " + keyval[1]));
- }
- }
- }
-
- static class ConfFileProcessor implements OptionProcessor {
- static final Yaml yaml = new Yaml();
-
- static Map loadYamlConf(String f) throws IOException {
- InputStreamReader reader = null;
- try {
- FileInputStream fis = new FileInputStream(f);
- reader = new InputStreamReader(fis, UTF8);
- return (Map) yaml.load(reader);
- } finally {
- if (reader != null)
- reader.close();
- }
- }
-
- static Map loadConf(String f) throws IOException {
- if (f.endsWith(".yaml"))
- return loadYamlConf(f);
- throw new IOException("Unknown configuration file type: " + f
- + " does not end with either .yaml");
- }
-
- @Override
- public void process(Config conf, CommandLine commandLine)
- throws ParseException {
- try {
- for (String f : commandLine.getOptionValues("conf")) {
- Map m = loadConf(f);
- if (m == null)
- throw new ParseException("Empty configuration file "
- + f);
- conf.putAll(m);
- }
- } catch (IOException e) {
- throw new ParseException(e.getMessage());
- }
- }
- }
-}
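For reference, the removed ExtraConfigProcessor above feeds each -D <key>=<value> pair through SnakeYAML so the value keeps its natural type (Integer, Boolean, list, ...) instead of arriving as a plain String. A minimal standalone sketch of that idea (class name hypothetical, not part of the patch):

import java.util.HashMap;
import java.util.Map;
import org.yaml.snakeyaml.Yaml;

public class ExtraConfigSketch {
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        Yaml yaml = new Yaml();
        Map<String, Object> conf = new HashMap<String, Object>();
        for (String s : new String[] { "topology.workers=4", "topology.debug=true" }) {
            String[] keyval = s.split("=", 2);
            // Parsing "key: value" as YAML preserves the value's type.
            conf.putAll((Map<String, Object>) yaml.load(keyval[0] + ": " + keyval[1]));
        }
        // Prints {topology.workers=4, topology.debug=true} with Integer/Boolean values.
        System.out.println(conf);
    }
}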
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/ILocalCluster.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/ILocalCluster.java b/jstorm-client/src/main/java/backtype/storm/ILocalCluster.java
deleted file mode 100644
index 03d9054..0000000
--- a/jstorm-client/src/main/java/backtype/storm/ILocalCluster.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package backtype.storm;
-
-import backtype.storm.generated.AlreadyAliveException;
-import backtype.storm.generated.ClusterSummary;
-import backtype.storm.generated.InvalidTopologyException;
-import backtype.storm.generated.KillOptions;
-import backtype.storm.generated.SubmitOptions;
-import backtype.storm.generated.NotAliveException;
-import backtype.storm.generated.RebalanceOptions;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.generated.TopologyInfo;
-
-import java.util.Map;
-
-public interface ILocalCluster {
- void submitTopology(String topologyName, Map conf, StormTopology topology)
- throws AlreadyAliveException, InvalidTopologyException;
-
- void submitTopologyWithOpts(String topologyName, Map conf,
- StormTopology topology, SubmitOptions submitOpts)
- throws AlreadyAliveException, InvalidTopologyException;
-
- void killTopology(String topologyName) throws NotAliveException;
-
- void killTopologyWithOpts(String name, KillOptions options)
- throws NotAliveException;
-
- void activate(String topologyName) throws NotAliveException;
-
- void deactivate(String topologyName) throws NotAliveException;
-
- void rebalance(String name, RebalanceOptions options)
- throws NotAliveException;
-
- void shutdown();
-
- String getTopologyConf(String id);
-
- StormTopology getTopology(String id);
-
- ClusterSummary getClusterInfo();
-
- TopologyInfo getTopologyInfo(String id);
-
- Map getState();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/ILocalDRPC.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/ILocalDRPC.java b/jstorm-client/src/main/java/backtype/storm/ILocalDRPC.java
deleted file mode 100644
index 80fc842..0000000
--- a/jstorm-client/src/main/java/backtype/storm/ILocalDRPC.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package backtype.storm;
-
-import backtype.storm.daemon.Shutdownable;
-import backtype.storm.generated.DistributedRPC;
-import backtype.storm.generated.DistributedRPCInvocations;
-
-public interface ILocalDRPC extends DistributedRPC.Iface,
- DistributedRPCInvocations.Iface, Shutdownable {
- public String getServiceId();
-}
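The ILocalCluster interface removed above is normally exercised through the LocalCluster implementation in tests. A rough usage sketch under that assumption (topology construction elided, names illustrative, not part of the patch):

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.generated.StormTopology;

public class LocalClusterSketch {
    public static void run(StormTopology topology) throws Exception {
        Config conf = new Config();
        conf.put(Config.TOPOLOGY_DEBUG, true);

        LocalCluster cluster = new LocalCluster();      // assumed ILocalCluster implementation
        cluster.submitTopology("test-topology", conf, topology);
        Thread.sleep(10000);                            // let the topology run for a while
        cluster.killTopology("test-topology");
        cluster.shutdown();
    }
}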
[59/60] [abbrv] storm git commit: remove .gitmodules
Posted by pt...@apache.org.
remove .gitmodules
Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/5744ac3f
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/5744ac3f
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/5744ac3f
Branch: refs/heads/jstorm-import
Commit: 5744ac3f1c761f2cd3b4013f29381abfd2ccecf0
Parents: e8f64d5
Author: P. Taylor Goetz <pt...@gmail.com>
Authored: Thu Nov 5 15:27:40 2015 -0500
Committer: P. Taylor Goetz <pt...@gmail.com>
Committed: Thu Nov 5 15:27:40 2015 -0500
----------------------------------------------------------------------
.gitmodules | 0
1 file changed, 0 insertions(+), 0 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/storm/blob/5744ac3f/.gitmodules
----------------------------------------------------------------------
diff --git a/.gitmodules b/.gitmodules
deleted file mode 100644
index e69de29..0000000
[11/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/Config.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/Config.java b/jstorm-core/src/main/java/backtype/storm/Config.java
new file mode 100644
index 0000000..4273908
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/Config.java
@@ -0,0 +1,1579 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import backtype.storm.ConfigValidation;
+import backtype.storm.serialization.IKryoDecorator;
+import backtype.storm.serialization.IKryoFactory;
+
+import com.esotericsoftware.kryo.Serializer;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Topology configs are specified as a plain old map. This class provides a
+ * convenient way to create a topology config map by providing setter methods for
+ * all the configs that can be set. It also makes it easier to do things like add
+ * serializations.
+ *
+ * <p>This class also provides constants for all the configurations possible on
+ * a Storm cluster and Storm topology. Each constant is paired with a schema
+ * that defines the validity criterion of the corresponding field. Default
+ * values for these configs can be found in defaults.yaml.</p>
+ *
+ * <p>Note that you may put other configurations in any of the configs. Storm
+ * will ignore anything it doesn't recognize, but your topologies are free to make
+ * use of them by reading them in the prepare method of Bolts or the open method of
+ * Spouts.</p>
+ */
+public class Config extends HashMap<String, Object> {
+ //DO NOT CHANGE UNLESS WE ADD IN STATE NOT STORED IN THE PARENT CLASS
+ private static final long serialVersionUID = -1550278723792864455L;
+
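    // Illustrative sketch (not part of the diff): since Config is just a HashMap<String, Object>,
    // a topology config is assembled by putting the constants declared below, plus any custom
    // keys the topology wants to read back from the conf map passed to a bolt's prepare()
    // or a spout's open().
    static Config exampleTopologyConf() {
        Config conf = new Config();
        conf.put(Config.TOPOLOGY_WORKERS, 4);       // four worker JVMs for the topology
        conf.put(Config.TOPOLOGY_DEBUG, true);      // log every emitted tuple
        conf.put("my.custom.setting", 42);          // custom key; Storm ignores it, user code can read it
        return conf;
    }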
+ /**
+ * This is part of a temporary workaround to a ZK bug, it is the 'scheme:acl' for
+ * the user Nimbus and Supervisors use to authenticate with ZK.
+ */
+ public static final String STORM_ZOOKEEPER_SUPERACL = "storm.zookeeper.superACL";
+ public static final Object STORM_ZOOKEEPER_SUPERACL_SCHEMA = String.class;
+
+ /**
+ * The transporter for communication among Storm tasks
+ */
+ public static final String STORM_MESSAGING_TRANSPORT = "storm.messaging.transport";
+ public static final Object STORM_MESSAGING_TRANSPORT_SCHEMA = String.class;
+
+ /**
+ * Netty based messaging: The buffer size for send/recv buffer
+ */
+ public static final String STORM_MESSAGING_NETTY_BUFFER_SIZE = "storm.messaging.netty.buffer_size";
+ public static final Object STORM_MESSAGING_NETTY_BUFFER_SIZE_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: Sets the backlog value to specify when the channel binds to a local address
+ */
+ public static final String STORM_MESSAGING_NETTY_SOCKET_BACKLOG = "storm.messaging.netty.socket.backlog";
+ public static final Object STORM_MESSAGING_NETTY_SOCKET_BACKLOG_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: The max # of retries that a peer will perform when a remote is not accessible
+ */
+ public static final String STORM_MESSAGING_NETTY_MAX_RETRIES = "storm.messaging.netty.max_retries";
+ public static final Object STORM_MESSAGING_NETTY_MAX_RETRIES_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: The min # of milliseconds that a peer will wait.
+ */
+ public static final String STORM_MESSAGING_NETTY_MIN_SLEEP_MS = "storm.messaging.netty.min_wait_ms";
+ public static final Object STORM_MESSAGING_NETTY_MIN_SLEEP_MS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: The max # of milliseconds that a peer will wait.
+ */
+ public static final String STORM_MESSAGING_NETTY_MAX_SLEEP_MS = "storm.messaging.netty.max_wait_ms";
+ public static final Object STORM_MESSAGING_NETTY_MAX_SLEEP_MS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: The # of worker threads for the server.
+ */
+ public static final String STORM_MESSAGING_NETTY_SERVER_WORKER_THREADS = "storm.messaging.netty.server_worker_threads";
+ public static final Object STORM_MESSAGING_NETTY_SERVER_WORKER_THREADS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: The # of worker threads for the client.
+ */
+ public static final String STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS = "storm.messaging.netty.client_worker_threads";
+ public static final Object STORM_MESSAGING_NETTY_CLIENT_WORKER_THREADS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * If the Netty messaging layer is busy, the Netty client will try to batch messages as much as possible, up to STORM_NETTY_MESSAGE_BATCH_SIZE bytes
+ */
+ public static final String STORM_NETTY_MESSAGE_BATCH_SIZE = "storm.messaging.netty.transfer.batch.size";
+ public static final Object STORM_NETTY_MESSAGE_BATCH_SIZE_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The interval at which to check whether the Netty channel is writable and try to write pending messages
+ */
+ public static final String STORM_NETTY_FLUSH_CHECK_INTERVAL_MS = "storm.messaging.netty.flush.check.interval.ms";
+ public static final Object STORM_NETTY_FLUSH_CHECK_INTERVAL_MS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Netty based messaging: Is authentication required for Netty messaging from client worker process to server worker process.
+ */
+ public static final String STORM_MESSAGING_NETTY_AUTHENTICATION = "storm.messaging.netty.authentication";
+ public static final Object STORM_MESSAGING_NETTY_AUTHENTICATION_SCHEMA = Boolean.class;
+
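    // Illustrative sketch (not part of the diff): the max_retries/min_wait_ms/max_wait_ms settings
    // above typically drive a capped exponential backoff between reconnect attempts. The real
    // Netty client code is not shown here and may differ in detail.
    static long exampleRetryDelayMs(int attempt, long minWaitMs, long maxWaitMs) {
        long delay = minWaitMs * (1L << Math.min(attempt, 30));  // double the wait on every attempt
        return Math.min(delay, maxWaitMs);                       // but never wait longer than max_wait_ms
    }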
+ /**
+ * The delegate for serializing metadata, should be used for serialized objects stored in zookeeper and on disk.
+ * This is NOT used for compressing serialized tuples sent between topologies.
+ */
+ public static final String STORM_META_SERIALIZATION_DELEGATE = "storm.meta.serialization.delegate";
+ public static final Object STORM_META_SERIALIZATION_DELEGATE_SCHEMA = String.class;
+
+ /**
+ * A list of hosts of ZooKeeper servers used to manage the cluster.
+ */
+ public static final String STORM_ZOOKEEPER_SERVERS = "storm.zookeeper.servers";
+ public static final Object STORM_ZOOKEEPER_SERVERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * The port Storm will use to connect to each of the ZooKeeper servers.
+ */
+ public static final String STORM_ZOOKEEPER_PORT = "storm.zookeeper.port";
+ public static final Object STORM_ZOOKEEPER_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * A directory on the local filesystem used by Storm for any local
+ * filesystem usage it needs. The directory must exist and the Storm daemons must
+ * have permission to read/write from this location.
+ */
+ public static final String STORM_LOCAL_DIR = "storm.local.dir";
+ public static final Object STORM_LOCAL_DIR_SCHEMA = String.class;
+
+ /**
+ * A global task scheduler used to assign topologies' tasks to supervisors' workers.
+ *
+ * If this is not set, a default system scheduler will be used.
+ */
+ public static final String STORM_SCHEDULER = "storm.scheduler";
+ public static final Object STORM_SCHEDULER_SCHEMA = String.class;
+
+ /**
+ * The mode this Storm cluster is running in. Either "distributed" or "local".
+ */
+ public static final String STORM_CLUSTER_MODE = "storm.cluster.mode";
+ public static final Object STORM_CLUSTER_MODE_SCHEMA = String.class;
+
+ /**
+ * The hostname the supervisors/workers should report to nimbus. If unset, Storm will
+ * get the hostname to report by calling <code>InetAddress.getLocalHost().getCanonicalHostName()</code>.
+ *
+ * You should set this config when you don't have DNS that supervisors/workers
+ * can use to find each other based on the hostname returned by calls to
+ * <code>InetAddress.getLocalHost().getCanonicalHostName()</code>.
+ */
+ public static final String STORM_LOCAL_HOSTNAME = "storm.local.hostname";
+ public static final Object STORM_LOCAL_HOSTNAME_SCHEMA = String.class;
+
+ /**
+ * The plugin that will convert a principal to a local user.
+ */
+ public static final String STORM_PRINCIPAL_TO_LOCAL_PLUGIN = "storm.principal.tolocal";
+ public static final Object STORM_PRINCIPAL_TO_LOCAL_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * The plugin that will provide user groups service
+ */
+ public static final String STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN = "storm.group.mapping.service";
+ public static final Object STORM_GROUP_MAPPING_SERVICE_PROVIDER_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * The maximum number of seconds the group mapping service will cache user groups.
+ */
+ public static final String STORM_GROUP_MAPPING_SERVICE_CACHE_DURATION_SECS = "storm.group.mapping.service.cache.duration.secs";
+ public static final Object STORM_GROUP_MAPPING_SERVICE_CACHE_DURATION_SECS_SCHEMA = Number.class;
+
+ /**
+ * The default transport plug-in for Thrift client/server communication
+ */
+ public static final String STORM_THRIFT_TRANSPORT_PLUGIN = "storm.thrift.transport";
+ public static final Object STORM_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * The serializer class for ListDelegate (tuple payload).
+ * The default serializer will be ListDelegateSerializer
+ */
+ public static final String TOPOLOGY_TUPLE_SERIALIZER = "topology.tuple.serializer";
+ public static final Object TOPOLOGY_TUPLE_SERIALIZER_SCHEMA = String.class;
+
+ /**
+ * Try to serialize all tuples, even for local transfers. This should only be used
+ * for testing, as a sanity check that all of your tuples are setup properly.
+ */
+ public static final String TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE = "topology.testing.always.try.serialize";
+ public static final Object TOPOLOGY_TESTING_ALWAYS_TRY_SERIALIZE_SCHEMA = Boolean.class;
+
+ /**
+ * Whether or not to use ZeroMQ for messaging in local mode. If this is set
+ * to false, then Storm will use a pure-Java messaging system. The purpose
+ * of this flag is to make it easy to run Storm in local mode by eliminating
+ * the need for native dependencies, which can be difficult to install.
+ *
+ * Defaults to false.
+ */
+ public static final String STORM_LOCAL_MODE_ZMQ = "storm.local.mode.zmq";
+ public static final Object STORM_LOCAL_MODE_ZMQ_SCHEMA = Boolean.class;
+
+ /**
+ * The root location at which Storm stores data in ZooKeeper.
+ */
+ public static final String STORM_ZOOKEEPER_ROOT = "storm.zookeeper.root";
+ public static final Object STORM_ZOOKEEPER_ROOT_SCHEMA = String.class;
+
+ /**
+ * The session timeout for clients to ZooKeeper.
+ */
+ public static final String STORM_ZOOKEEPER_SESSION_TIMEOUT = "storm.zookeeper.session.timeout";
+ public static final Object STORM_ZOOKEEPER_SESSION_TIMEOUT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The connection timeout for clients to ZooKeeper.
+ */
+ public static final String STORM_ZOOKEEPER_CONNECTION_TIMEOUT = "storm.zookeeper.connection.timeout";
+ public static final Object STORM_ZOOKEEPER_CONNECTION_TIMEOUT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The number of times to retry a Zookeeper operation.
+ */
+ public static final String STORM_ZOOKEEPER_RETRY_TIMES="storm.zookeeper.retry.times";
+ public static final Object STORM_ZOOKEEPER_RETRY_TIMES_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The interval between retries of a Zookeeper operation.
+ */
+ public static final String STORM_ZOOKEEPER_RETRY_INTERVAL="storm.zookeeper.retry.interval";
+ public static final Object STORM_ZOOKEEPER_RETRY_INTERVAL_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The ceiling of the interval between retries of a Zookeeper operation.
+ */
+ public static final String STORM_ZOOKEEPER_RETRY_INTERVAL_CEILING="storm.zookeeper.retry.intervalceiling.millis";
+ public static final Object STORM_ZOOKEEPER_RETRY_INTERVAL_CEILING_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The cluster Zookeeper authentication scheme to use, e.g. "digest". Defaults to no authentication.
+ */
+ public static final String STORM_ZOOKEEPER_AUTH_SCHEME="storm.zookeeper.auth.scheme";
+ public static final Object STORM_ZOOKEEPER_AUTH_SCHEME_SCHEMA = String.class;
+
+ /**
+ * A string representing the payload for cluster Zookeeper authentication.
+ * It gets serialized using UTF-8 encoding during authentication.
+ * Note that if this is set to something with a secret (as when using
+ * digest authentication) then it should only be set in the
+ * storm-cluster-auth.yaml file.
+ * This file storm-cluster-auth.yaml should then be protected with
+ * appropriate permissions that deny access from workers.
+ */
+ public static final String STORM_ZOOKEEPER_AUTH_PAYLOAD="storm.zookeeper.auth.payload";
+ public static final Object STORM_ZOOKEEPER_AUTH_PAYLOAD_SCHEMA = String.class;
+
+ /**
+ * The topology Zookeeper authentication scheme to use, e.g. "digest". Defaults to no authentication.
+ */
+ public static final String STORM_ZOOKEEPER_TOPOLOGY_AUTH_SCHEME="storm.zookeeper.topology.auth.scheme";
+ public static final Object STORM_ZOOKEEPER_TOPOLOGY_AUTH_SCHEME_SCHEMA = String.class;
+
+ /**
+ * A string representing the payload for topology Zookeeper authentication. It gets serialized using UTF-8 encoding during authentication.
+ */
+ public static final String STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD="storm.zookeeper.topology.auth.payload";
+ public static final Object STORM_ZOOKEEPER_TOPOLOGY_AUTH_PAYLOAD_SCHEMA = String.class;
+
+ /**
+ * The id assigned to a running topology. The id is the storm name with a unique nonce appended.
+ */
+ public static final String TOPOLOGY_ID = "topology.id";
+ public static final Object TOPOLOGY_ID_SCHEMA = String.class;
+ public static final String STORM_ID = TOPOLOGY_ID;
+ public static final Object STORM_ID_SCHEMA = String.class;
+ /**
+ * The number of times to retry a Nimbus operation.
+ */
+ public static final String STORM_NIMBUS_RETRY_TIMES="storm.nimbus.retry.times";
+ public static final Object STORM_NIMBUS_RETRY_TIMES_SCHEMA = Number.class;
+
+ /**
+ * The starting interval between exponential backoff retries of a Nimbus operation.
+ */
+ public static final String STORM_NIMBUS_RETRY_INTERVAL="storm.nimbus.retry.interval.millis";
+ public static final Object STORM_NIMBUS_RETRY_INTERVAL_SCHEMA = Number.class;
+
+ /**
+ * The ceiling of the interval between retries of a client connect to Nimbus operation.
+ */
+ public static final String STORM_NIMBUS_RETRY_INTERVAL_CEILING="storm.nimbus.retry.intervalceiling.millis";
+ public static final Object STORM_NIMBUS_RETRY_INTERVAL_CEILING_SCHEMA = Number.class;
+
+ /**
+ * The host that the master server is running on.
+ */
+ public static final String NIMBUS_HOST = "nimbus.host";
+ public static final Object NIMBUS_HOST_SCHEMA = String.class;
+
+ /**
+ * The Nimbus transport plug-in for Thrift client/server communication
+ */
+ public static final String NIMBUS_THRIFT_TRANSPORT_PLUGIN = "nimbus.thrift.transport";
+ public static final Object NIMBUS_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * Which port the Thrift interface of Nimbus should run on. Clients should
+ * connect to this port to upload jars and submit topologies.
+ */
+ public static final String NIMBUS_THRIFT_PORT = "nimbus.thrift.port";
+ public static final Object NIMBUS_THRIFT_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The number of threads that should be used by the nimbus thrift server.
+ */
+ public static final String NIMBUS_THRIFT_THREADS = "nimbus.thrift.threads";
+ public static final Object NIMBUS_THRIFT_THREADS_SCHEMA = Number.class;
+
+ /**
+ * A list of users that are cluster admins and can run any command. To use this set
+ * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
+ */
+ public static final String NIMBUS_ADMINS = "nimbus.admins";
+ public static final Object NIMBUS_ADMINS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of users that are the only ones allowed to run user operations on the Storm cluster.
+ * To use this set nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
+ */
+ public static final String NIMBUS_USERS = "nimbus.users";
+ public static final Object NIMBUS_USERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of groups; only users belonging to these groups are allowed to run user operations on the Storm cluster.
+ * To use this set nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
+ */
+ public static final String NIMBUS_GROUPS = "nimbus.groups";
+ public static final Object NIMBUS_GROUPS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of users that run the supervisors and should be authorized to interact with
+ * nimbus as a supervisor would. To use this set
+ * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
+ */
+ public static final String NIMBUS_SUPERVISOR_USERS = "nimbus.supervisor.users";
+ public static final Object NIMBUS_SUPERVISOR_USERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * The maximum buffer size thrift should use when reading messages.
+ */
+ public static final String NIMBUS_THRIFT_MAX_BUFFER_SIZE = "nimbus.thrift.max_buffer_size";
+ public static final Object NIMBUS_THRIFT_MAX_BUFFER_SIZE_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * This parameter is used by the storm-deploy project to configure the
+ * jvm options for the nimbus daemon.
+ */
+ public static final String NIMBUS_CHILDOPTS = "nimbus.childopts";
+ public static final Object NIMBUS_CHILDOPTS_SCHEMA = String.class;
+
+
+ /**
+ * How long without heartbeating a task can go before nimbus will consider the
+ * task dead and reassign it to another location.
+ */
+ public static final String NIMBUS_TASK_TIMEOUT_SECS = "nimbus.task.timeout.secs";
+ public static final Object NIMBUS_TASK_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * How often nimbus should wake up to check heartbeats and do reassignments. Note
+ * that if a machine ever goes down Nimbus will immediately wake up and take action.
+ * This parameter is for checking for failures when there's no explicit event like that
+ * occurring.
+ */
+ public static final String NIMBUS_MONITOR_FREQ_SECS = "nimbus.monitor.freq.secs";
+ public static final Object NIMBUS_MONITOR_FREQ_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How often nimbus should wake the cleanup thread to clean the inbox.
+ * @see NIMBUS_INBOX_JAR_EXPIRATION_SECS
+ */
+ public static final String NIMBUS_CLEANUP_INBOX_FREQ_SECS = "nimbus.cleanup.inbox.freq.secs";
+ public static final Object NIMBUS_CLEANUP_INBOX_FREQ_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The length of time a jar file lives in the inbox before being deleted by the cleanup thread.
+ *
+ * Probably keep this value greater than or equal to NIMBUS_CLEANUP_INBOX_FREQ_SECS.
+ * Note that the time it takes to delete an inbox jar file is going to be somewhat more than
+ * NIMBUS_INBOX_JAR_EXPIRATION_SECS (depending on how often NIMBUS_CLEANUP_INBOX_FREQ_SECS
+ * is set to).
+ * @see NIMBUS_CLEANUP_INBOX_FREQ_SECS
+ */
+ public static final String NIMBUS_INBOX_JAR_EXPIRATION_SECS = "nimbus.inbox.jar.expiration.secs";
+ public static final Object NIMBUS_INBOX_JAR_EXPIRATION_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How long a supervisor can go without heartbeating before nimbus considers it dead
+ * and stops assigning new work to it.
+ */
+ public static final String NIMBUS_SUPERVISOR_TIMEOUT_SECS = "nimbus.supervisor.timeout.secs";
+ public static final Object NIMBUS_SUPERVISOR_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * A special timeout used when a task is initially launched. During launch, this is the timeout
+ * used until the first heartbeat, overriding nimbus.task.timeout.secs.
+ *
+ * <p>A separate timeout exists for launch because there can be quite a bit of overhead
+ * to launching new JVM's and configuring them.</p>
+ */
+ public static final String NIMBUS_TASK_LAUNCH_SECS = "nimbus.task.launch.secs";
+ public static final Object NIMBUS_TASK_LAUNCH_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Whether or not nimbus should reassign tasks if it detects that a task goes down.
+ * Defaults to true, and it's not recommended to change this value.
+ */
+ public static final String NIMBUS_REASSIGN = "nimbus.reassign";
+ public static final Object NIMBUS_REASSIGN_SCHEMA = Boolean.class;
+
+ /**
+ * During upload/download with the master, how long an upload or download connection is idle
+ * before nimbus considers it dead and drops the connection.
+ */
+ public static final String NIMBUS_FILE_COPY_EXPIRATION_SECS = "nimbus.file.copy.expiration.secs";
+ public static final Object NIMBUS_FILE_COPY_EXPIRATION_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * A custom class that implements ITopologyValidator that is run whenever a
+ * topology is submitted. Can be used to provide business-specific logic for
+ * whether topologies are allowed to run or not.
+ */
+ public static final String NIMBUS_TOPOLOGY_VALIDATOR = "nimbus.topology.validator";
+ public static final Object NIMBUS_TOPOLOGY_VALIDATOR_SCHEMA = String.class;
+
+ /**
+ * Class name for authorization plugin for Nimbus
+ */
+ public static final String NIMBUS_AUTHORIZER = "nimbus.authorizer";
+ public static final Object NIMBUS_AUTHORIZER_SCHEMA = String.class;
+
+
+ /**
+ * Impersonation user ACL config entries.
+ */
+ public static final String NIMBUS_IMPERSONATION_AUTHORIZER = "nimbus.impersonation.authorizer";
+ public static final Object NIMBUS_IMPERSONATION_AUTHORIZER_SCHEMA = String.class;
+
+
+ /**
+ * Impersonation user ACL config entries.
+ */
+ public static final String NIMBUS_IMPERSONATION_ACL = "nimbus.impersonation.acl";
+ public static final Object NIMBUS_IMPERSONATION_ACL_SCHEMA = ConfigValidation.MapOfStringToMapValidator;
+
+ /**
+ * How often nimbus should wake up to renew credentials if needed.
+ */
+ public static final String NIMBUS_CREDENTIAL_RENEW_FREQ_SECS = "nimbus.credential.renewers.freq.secs";
+ public static final Object NIMBUS_CREDENTIAL_RENEW_FREQ_SECS_SCHEMA = Number.class;
+
+ /**
+ * A list of credential renewers that nimbus should load.
+ */
+ public static final String NIMBUS_CREDENTIAL_RENEWERS = "nimbus.credential.renewers.classes";
+ public static final Object NIMBUS_CREDENTIAL_RENEWERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of plugins that nimbus should load during submit topology to populate
+ * credentials on user's behalf.
+ */
+ public static final String NIMBUS_AUTO_CRED_PLUGINS = "nimbus.autocredential.plugins.classes";
+ public static final Object NIMBUS_AUTO_CRED_PLUGINS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * Storm UI binds to this host/interface.
+ */
+ public static final String UI_HOST = "ui.host";
+ public static final Object UI_HOST_SCHEMA = String.class;
+
+ /**
+ * Storm UI binds to this port.
+ */
+ public static final String UI_PORT = "ui.port";
+ public static final Object UI_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * HTTP UI port for log viewer
+ */
+ public static final String LOGVIEWER_PORT = "logviewer.port";
+ public static final Object LOGVIEWER_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Childopts for log viewer java process.
+ */
+ public static final String LOGVIEWER_CHILDOPTS = "logviewer.childopts";
+ public static final Object LOGVIEWER_CHILDOPTS_SCHEMA = String.class;
+
+ /**
+ * How often to clean up old log files
+ */
+ public static final String LOGVIEWER_CLEANUP_INTERVAL_SECS = "logviewer.cleanup.interval.secs";
+ public static final Object LOGVIEWER_CLEANUP_INTERVAL_SECS_SCHEMA = ConfigValidation.PositiveIntegerValidator;
+
+ /**
+ * How many minutes must pass since a log file was last modified before it is considered for clean-up
+ */
+ public static final String LOGVIEWER_CLEANUP_AGE_MINS = "logviewer.cleanup.age.mins";
+ public static final Object LOGVIEWER_CLEANUP_AGE_MINS_SCHEMA = ConfigValidation.PositiveIntegerValidator;
+
+ /**
+ * A list of users allowed to view logs via the Log Viewer
+ */
+ public static final String LOGS_USERS = "logs.users";
+ public static final Object LOGS_USERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of groups allowed to view logs via the Log Viewer
+ */
+ public static final String LOGS_GROUPS = "logs.groups";
+ public static final Object LOGS_GROUPS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * Appender name used by log viewer to determine log directory.
+ */
+ public static final String LOGVIEWER_APPENDER_NAME = "logviewer.appender.name";
+ public static final Object LOGVIEWER_APPENDER_NAME_SCHEMA = String.class;
+
+ /**
+ * Childopts for Storm UI Java process.
+ */
+ public static final String UI_CHILDOPTS = "ui.childopts";
+ public static final Object UI_CHILDOPTS_SCHEMA = String.class;
+
+ /**
+ * A class implementing javax.servlet.Filter for authenticating/filtering UI requests
+ */
+ public static final String UI_FILTER = "ui.filter";
+ public static final Object UI_FILTER_SCHEMA = String.class;
+
+ /**
+ * Initialization parameters for the javax.servlet.Filter
+ */
+ public static final String UI_FILTER_PARAMS = "ui.filter.params";
+ public static final Object UI_FILTER_PARAMS_SCHEMA = Map.class;
+
+ /**
+ * The size of the header buffer for the UI in bytes
+ */
+ public static final String UI_HEADER_BUFFER_BYTES = "ui.header.buffer.bytes";
+ public static final Object UI_HEADER_BUFFER_BYTES_SCHEMA = Number.class;
+
+ /**
+ * This port is used by the Storm UI for receiving HTTPS (SSL) requests from clients.
+ */
+ public static final String UI_HTTPS_PORT = "ui.https.port";
+ public static final Object UI_HTTPS_PORT_SCHEMA = Number.class;
+
+ /**
+ * Path to the keystore used by Storm UI for setting up HTTPS (SSL).
+ */
+ public static final String UI_HTTPS_KEYSTORE_PATH = "ui.https.keystore.path";
+ public static final Object UI_HTTPS_KEYSTORE_PATH_SCHEMA = String.class;
+
+ /**
+ * Password to the keystore used by Storm UI for setting up HTTPS (SSL).
+ */
+ public static final String UI_HTTPS_KEYSTORE_PASSWORD = "ui.https.keystore.password";
+ public static final Object UI_HTTPS_KEYSTORE_PASSWORD_SCHEMA = String.class;
+
+ /**
+ * Type of keystore used by Storm UI for setting up HTTPS (SSL).
+ * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
+ */
+ public static final String UI_HTTPS_KEYSTORE_TYPE = "ui.https.keystore.type";
+ public static final Object UI_HTTPS_KEYSTORE_TYPE_SCHEMA = String.class;
+
+ /**
+ * Password to the private key in the keystore for setting up HTTPS (SSL).
+ */
+ public static final String UI_HTTPS_KEY_PASSWORD = "ui.https.key.password";
+ public static final Object UI_HTTPS_KEY_PASSWORD_SCHEMA = String.class;
+
+ /**
+ * Path to the truststore used by the Storm UI for setting up HTTPS (SSL).
+ */
+ public static final String UI_HTTPS_TRUSTSTORE_PATH = "ui.https.truststore.path";
+ public static final Object UI_HTTPS_TRUSTSTORE_PATH_SCHEMA = String.class;
+
+ /**
+ * Password to the truststore used by the Storm UI for setting up HTTPS (SSL).
+ */
+ public static final String UI_HTTPS_TRUSTSTORE_PASSWORD = "ui.https.truststore.password";
+ public static final Object UI_HTTPS_TRUSTSTORE_PASSWORD_SCHEMA = String.class;
+
+ /**
+ * Type of truststore used by Storm UI for setting up HTTPS (SSL).
+ * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
+ */
+ public static final String UI_HTTPS_TRUSTSTORE_TYPE = "ui.https.truststore.type";
+ public static final Object UI_HTTPS_TRUSTSTORE_TYPE_SCHEMA = String.class;
+
+ /**
+ * Whether the Storm UI should request (but not require) an SSL client certificate when setting up HTTPS.
+ */
+ public static final String UI_HTTPS_WANT_CLIENT_AUTH = "ui.https.want.client.auth";
+ public static final Object UI_HTTPS_WANT_CLIENT_AUTH_SCHEMA = Boolean.class;
+
+ public static final String UI_HTTPS_NEED_CLIENT_AUTH = "ui.https.need.client.auth";
+ public static final Object UI_HTTPS_NEED_CLIENT_AUTH_SCHEMA = Boolean.class;
+
+
+ /**
+ * List of DRPC servers so that the DRPCSpout knows who to talk to.
+ */
+ public static final String DRPC_SERVERS = "drpc.servers";
+ public static final Object DRPC_SERVERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * This port is used by Storm DRPC for receiving HTTP DRPC requests from clients.
+ */
+ public static final String DRPC_HTTP_PORT = "drpc.http.port";
+ public static final Object DRPC_HTTP_PORT_SCHEMA = Number.class;
+
+ /**
+ * This port is used by Storm DRPC for receiving HTTPS (SSL) DRPC requests from clients.
+ */
+ public static final String DRPC_HTTPS_PORT = "drpc.https.port";
+ public static final Object DRPC_HTTPS_PORT_SCHEMA = Number.class;
+
+ /**
+ * Path to the keystore used by Storm DRPC for setting up HTTPS (SSL).
+ */
+ public static final String DRPC_HTTPS_KEYSTORE_PATH = "drpc.https.keystore.path";
+ public static final Object DRPC_HTTPS_KEYSTORE_PATH_SCHEMA = String.class;
+
+ /**
+ * Password to the keystore used by Storm DRPC for setting up HTTPS (SSL).
+ */
+ public static final String DRPC_HTTPS_KEYSTORE_PASSWORD = "drpc.https.keystore.password";
+ public static final Object DRPC_HTTPS_KEYSTORE_PASSWORD_SCHEMA = String.class;
+
+ /**
+ * Type of keystore used by Storm DRPC for setting up HTTPS (SSL).
+ * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
+ */
+ public static final String DRPC_HTTPS_KEYSTORE_TYPE = "drpc.https.keystore.type";
+ public static final Object DRPC_HTTPS_KEYSTORE_TYPE_SCHEMA = String.class;
+
+ /**
+ * Password to the private key in the keystore for setting up HTTPS (SSL).
+ */
+ public static final String DRPC_HTTPS_KEY_PASSWORD = "drpc.https.key.password";
+ public static final Object DRPC_HTTPS_KEY_PASSWORD_SCHEMA = String.class;
+
+ /**
+ * Path to the truststore used by Storm DRPC for setting up HTTPS (SSL).
+ */
+ public static final String DRPC_HTTPS_TRUSTSTORE_PATH = "drpc.https.truststore.path";
+ public static final Object DRPC_HTTPS_TRUSTSTORE_PATH_SCHEMA = String.class;
+
+ /**
+ * Password to the truststore used by Storm DRPC for setting up HTTPS (SSL).
+ */
+ public static final String DRPC_HTTPS_TRUSTSTORE_PASSWORD = "drpc.https.truststore.password";
+ public static final Object DRPC_HTTPS_TRUSTSTORE_PASSWORD_SCHEMA = String.class;
+
+ /**
+ * Type of truststore used by Storm DRPC for setting up HTTPS (SSL).
+ * see http://docs.oracle.com/javase/7/docs/api/java/security/KeyStore.html for more details.
+ */
+ public static final String DRPC_HTTPS_TRUSTSTORE_TYPE = "drpc.https.truststore.type";
+ public static final Object DRPC_HTTPS_TRUSTSTORE_TYPE_SCHEMA = String.class;
+
+ /**
+ * Whether Storm DRPC should request (but not require) an SSL client certificate when setting up HTTPS.
+ */
+ public static final String DRPC_HTTPS_WANT_CLIENT_AUTH = "drpc.https.want.client.auth";
+ public static final Object DRPC_HTTPS_WANT_CLIENT_AUTH_SCHEMA = Boolean.class;
+
+ public static final String DRPC_HTTPS_NEED_CLIENT_AUTH = "drpc.https.need.client.auth";
+ public static final Object DRPC_HTTPS_NEED_CLIENT_AUTH_SCHEMA = Boolean.class;
+
+ /**
+ * The DRPC transport plug-in for Thrift client/server communication
+ */
+ public static final String DRPC_THRIFT_TRANSPORT_PLUGIN = "drpc.thrift.transport";
+ public static final Object DRPC_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * This port is used by Storm DRPC for receiving DRPC requests from clients.
+ */
+ public static final String DRPC_PORT = "drpc.port";
+ public static final Object DRPC_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Class name for authorization plugin for DRPC client
+ */
+ public static final String DRPC_AUTHORIZER = "drpc.authorizer";
+ public static final Object DRPC_AUTHORIZER_SCHEMA = String.class;
+
+ /**
+ * The Access Control List for the DRPC Authorizer.
+ * @see DRPCSimpleAclAuthorizer
+ */
+ public static final String DRPC_AUTHORIZER_ACL = "drpc.authorizer.acl";
+ public static final Object DRPC_AUTHORIZER_ACL_SCHEMA = Map.class;
+
+ /**
+ * File name of the DRPC Authorizer ACL.
+ * @see DRPCSimpleAclAuthorizer
+ */
+ public static final String DRPC_AUTHORIZER_ACL_FILENAME = "drpc.authorizer.acl.filename";
+ public static final Object DRPC_AUTHORIZER_ACL_FILENAME_SCHEMA = String.class;
+
+ /**
+ * Whether the DRPCSimpleAclAuthorizer should deny requests for operations
+ * involving functions that have no explicit ACL entry. When set to false
+ * (the default) DRPC functions that have no entry in the ACL will be
+ * permitted, which is appropriate for a development environment. When set
+ * to true, explicit ACL entries are required for every DRPC function, and
+ * any request for functions will be denied.
+ * @see DRPCSimpleAclAuthorizer
+ */
+ public static final String DRPC_AUTHORIZER_ACL_STRICT = "drpc.authorizer.acl.strict";
+ public static final Object DRPC_AUTHORIZER_ACL_STRICT_SCHEMA = Boolean.class;
+
+ /**
+ * DRPC thrift server worker threads
+ */
+ public static final String DRPC_WORKER_THREADS = "drpc.worker.threads";
+ public static final Object DRPC_WORKER_THREADS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The maximum buffer size thrift should use when reading messages for DRPC.
+ */
+ public static final String DRPC_MAX_BUFFER_SIZE = "drpc.max_buffer_size";
+ public static final Object DRPC_MAX_BUFFER_SIZE_SCHEMA = Number.class;
+
+ /**
+ * DRPC thrift server queue size
+ */
+ public static final String DRPC_QUEUE_SIZE = "drpc.queue.size";
+ public static final Object DRPC_QUEUE_SIZE_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The DRPC invocations transport plug-in for Thrift client/server communication
+ */
+ public static final String DRPC_INVOCATIONS_THRIFT_TRANSPORT_PLUGIN = "drpc.invocations.thrift.transport";
+ public static final Object DRPC_INVOCATIONS_THRIFT_TRANSPORT_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * This port on Storm DRPC is used by DRPC topologies to receive function invocations and send results back.
+ */
+ public static final String DRPC_INVOCATIONS_PORT = "drpc.invocations.port";
+ public static final Object DRPC_INVOCATIONS_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * DRPC invocations thrift server worker threads
+ */
+ public static final String DRPC_INVOCATIONS_THREADS = "drpc.invocations.threads";
+ public static final Object DRPC_INVOCATIONS_THREADS_SCHEMA = Number.class;
+
+ /**
+ * The timeout on DRPC requests within the DRPC server. Defaults to 10 minutes. Note that requests can also
+ * timeout based on the socket timeout on the DRPC client, and separately based on the topology message
+ * timeout for the topology implementing the DRPC function.
+ */
+ public static final String DRPC_REQUEST_TIMEOUT_SECS = "drpc.request.timeout.secs";
+ public static final Object DRPC_REQUEST_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Childopts for Storm DRPC Java process.
+ */
+ public static final String DRPC_CHILDOPTS = "drpc.childopts";
+ public static final Object DRPC_CHILDOPTS_SCHEMA = String.class;
+
+ /**
+ * Class name of the HTTP credentials plugin for the UI.
+ */
+ public static final String UI_HTTP_CREDS_PLUGIN = "ui.http.creds.plugin";
+ public static final Object UI_HTTP_CREDS_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * Class name of the HTTP credentials plugin for DRPC.
+ */
+ public static final String DRPC_HTTP_CREDS_PLUGIN = "drpc.http.creds.plugin";
+ public static final Object DRPC_HTTP_CREDS_PLUGIN_SCHEMA = String.class;
+
+ /**
+ * the metadata configured on the supervisor
+ */
+ public static final String SUPERVISOR_SCHEDULER_META = "supervisor.scheduler.meta";
+ public static final Object SUPERVISOR_SCHEDULER_META_SCHEMA = Map.class;
+ /**
+ * A list of ports that can run workers on this supervisor. Each worker uses one port, and
+ * the supervisor will only run one worker per port. Use this configuration to tune
+ * how many workers run on each machine.
+ */
+ public static final String SUPERVISOR_SLOTS_PORTS = "supervisor.slots.ports";
+ public static final Object SUPERVISOR_SLOTS_PORTS_SCHEMA = ConfigValidation.IntegersValidator;
+
+ /**
+ * A number representing the maximum number of workers any single topology can acquire.
+ */
+ public static final String NIMBUS_SLOTS_PER_TOPOLOGY = "nimbus.slots.perTopology";
+ public static final Object NIMBUS_SLOTS_PER_TOPOLOGY_SCHEMA = Number.class;
+
+ /**
+ * A class implementing javax.servlet.Filter for DRPC HTTP requests
+ */
+ public static final String DRPC_HTTP_FILTER = "drpc.http.filter";
+ public static final Object DRPC_HTTP_FILTER_SCHEMA = String.class;
+
+ /**
+ * Initialization parameters for the javax.servlet.Filter of the DRPC HTTP
+ * service
+ */
+ public static final String DRPC_HTTP_FILTER_PARAMS = "drpc.http.filter.params";
+ public static final Object DRPC_HTTP_FILTER_PARAMS_SCHEMA = Map.class;
+
+ /**
+ * A number representing the maximum number of executors any single topology can acquire.
+ */
+ public static final String NIMBUS_EXECUTORS_PER_TOPOLOGY = "nimbus.executors.perTopology";
+ public static final Object NIMBUS_EXECUTORS_PER_TOPOLOGY_SCHEMA = Number.class;
+
+ /**
+ * This parameter is used by the storm-deploy project to configure the
+ * jvm options for the supervisor daemon.
+ */
+ public static final String SUPERVISOR_CHILDOPTS = "supervisor.childopts";
+ public static final Object SUPERVISOR_CHILDOPTS_SCHEMA = String.class;
+
+ /**
+ * How long a worker can go without heartbeating before the supervisor tries to
+ * restart the worker process.
+ */
+ public static final String SUPERVISOR_WORKER_TIMEOUT_SECS = "supervisor.worker.timeout.secs";
+ public static final Object SUPERVISOR_WORKER_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How many seconds to sleep for before shutting down threads on worker
+ */
+ public static final String SUPERVISOR_WORKER_SHUTDOWN_SLEEP_SECS = "supervisor.worker.shutdown.sleep.secs";
+ public static final Object SUPERVISOR_WORKER_SHUTDOWN_SLEEP_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How long a worker can go without heartbeating during the initial launch before
+ * the supervisor tries to restart the worker process. This value overrides
+ * supervisor.worker.timeout.secs during launch because there is additional
+ * overhead to starting and configuring the JVM on launch.
+ */
+ public static final String SUPERVISOR_WORKER_START_TIMEOUT_SECS = "supervisor.worker.start.timeout.secs";
+ public static final Object SUPERVISOR_WORKER_START_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Whether or not the supervisor should launch workers assigned to it. Defaults
+ * to true -- and you should probably never change this value. This configuration
+ * is used in the Storm unit tests.
+ */
+ public static final String SUPERVISOR_ENABLE = "supervisor.enable";
+ public static final Object SUPERVISOR_ENABLE_SCHEMA = Boolean.class;
+
+ /**
+ * how often the supervisor sends a heartbeat to the master.
+ */
+ public static final String SUPERVISOR_HEARTBEAT_FREQUENCY_SECS = "supervisor.heartbeat.frequency.secs";
+ public static final Object SUPERVISOR_HEARTBEAT_FREQUENCY_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * How often the supervisor checks the worker heartbeats to see if any of them
+ * need to be restarted.
+ */
+ public static final String SUPERVISOR_MONITOR_FREQUENCY_SECS = "supervisor.monitor.frequency.secs";
+ public static final Object SUPERVISOR_MONITOR_FREQUENCY_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Whether the supervisor should try to run the worker as the launching user. Defaults to false.
+ */
+ public static final String SUPERVISOR_RUN_WORKER_AS_USER = "supervisor.run.worker.as.user";
+ public static final Object SUPERVISOR_RUN_WORKER_AS_USER_SCHEMA = Boolean.class;
+
+ /**
+ * Full path to the worker-launcher executable that will be used to launch workers when
+ * SUPERVISOR_RUN_WORKER_AS_USER is set to true.
+ */
+ public static final String SUPERVISOR_WORKER_LAUNCHER = "supervisor.worker.launcher";
+ public static final Object SUPERVISOR_WORKER_LAUNCHER_SCHEMA = String.class;
+
+ /**
+ * The jvm opts provided to workers launched by this supervisor. All "%ID%", "%WORKER-ID%", "%TOPOLOGY-ID%"
+ * and "%WORKER-PORT%" substrings are replaced with:
+ * %ID% -> port (for backward compatibility),
+ * %WORKER-ID% -> worker-id,
+ * %TOPOLOGY-ID% -> topology-id,
+ * %WORKER-PORT% -> port.
+ */
+ public static final String WORKER_CHILDOPTS = "worker.childopts";
+ public static final Object WORKER_CHILDOPTS_SCHEMA = ConfigValidation.StringOrStringListValidator;
+
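    // Illustrative sketch (not part of the diff): the %ID%/%WORKER-ID%/%TOPOLOGY-ID%/%WORKER-PORT%
    // substitution described above boils down to simple string replacement; the supervisor's
    // actual code may differ in detail.
    static String exampleSubstituteChildopts(String opts, String workerId, String topologyId, int port) {
        // e.g. "-Xmx768m -Xloggc:gc-%ID%.log" becomes "-Xmx768m -Xloggc:gc-6700.log" for port 6700
        return opts.replace("%ID%", String.valueOf(port))
                   .replace("%WORKER-ID%", workerId)
                   .replace("%TOPOLOGY-ID%", topologyId)
                   .replace("%WORKER-PORT%", String.valueOf(port));
    }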
+ /**
+ * The jvm opts provided to workers launched by this supervisor for GC. All "%ID%" substrings are replaced
+ * with an identifier for this worker. Because the JVM complains about multiple GC opts the topology
+ * can override this default value by setting topology.worker.gc.childopts.
+ */
+ public static final String WORKER_GC_CHILDOPTS = "worker.gc.childopts";
+ public static final Object WORKER_GC_CHILDOPTS_SCHEMA = ConfigValidation.StringOrStringListValidator;
+
+ /**
+ * Controls how many receiver threads each worker uses.
+ */
+ public static final String WORKER_RECEIVER_THREAD_COUNT = "topology.worker.receiver.thread.count";
+ public static final Object WORKER_RECEIVER_THREAD_COUNT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How often this worker should heartbeat to the supervisor.
+ */
+ public static final String WORKER_HEARTBEAT_FREQUENCY_SECS = "worker.heartbeat.frequency.secs";
+ public static final Object WORKER_HEARTBEAT_FREQUENCY_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How often a task should heartbeat its status to the master.
+ */
+ public static final String TASK_HEARTBEAT_FREQUENCY_SECS = "task.heartbeat.frequency.secs";
+ public static final Object TASK_HEARTBEAT_FREQUENCY_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * How often a task should sync its connections with other tasks (if a task is
+ * reassigned, the other tasks sending messages to it need to refresh their connections).
+ * In general though, when a reassignment happens other tasks will be notified
+ * almost immediately. This configuration is here just in case that notification doesn't
+ * come through.
+ */
+ public static final String TASK_REFRESH_POLL_SECS = "task.refresh.poll.secs";
+ public static final Object TASK_REFRESH_POLL_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * How often a task should sync credentials, worst case.
+ */
+ public static final String TASK_CREDENTIALS_POLL_SECS = "task.credentials.poll.secs";
+ public static final Object TASK_CREDENTIALS_POLL_SECS_SCHEMA = Number.class;
+
+
+ /**
+ * A list of users that are allowed to interact with the topology. To use this set
+ * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
+ */
+ public static final String TOPOLOGY_USERS = "topology.users";
+ public static final Object TOPOLOGY_USERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of groups that are allowed to interact with the topology. To use this set
+ * nimbus.authorizer to backtype.storm.security.auth.authorizer.SimpleACLAuthorizer
+ */
+ public static final String TOPOLOGY_GROUPS = "topology.groups";
+ public static final Object TOPOLOGY_GROUPS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * Whether or not Storm should time out messages. Defaults to true. This is meant to be used
+ * in unit tests to prevent tuples from being accidentally timed out during the test.
+ */
+ public static final String TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS = "topology.enable.message.timeouts";
+ public static final Object TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS_SCHEMA = Boolean.class;
+
+ /**
+ * When set to true, Storm will log every message that's emitted.
+ */
+ public static final String TOPOLOGY_DEBUG = "topology.debug";
+ public static final Object TOPOLOGY_DEBUG_SCHEMA = Boolean.class;
+
+ /**
+ * The serializer for communication between shell components and non-JVM
+ * processes
+ */
+ public static final String TOPOLOGY_MULTILANG_SERIALIZER = "topology.multilang.serializer";
+ public static final Object TOPOLOGY_MULTILANG_SERIALIZER_SCHEMA = String.class;
+
+ /**
+ * How many processes should be spawned around the cluster to execute this
+ * topology. Each process will execute some number of tasks as threads within
+ * them. This parameter should be used in conjunction with the parallelism hints
+ * on each component in the topology to tune the performance of a topology.
+ */
+ public static final String TOPOLOGY_WORKERS = "topology.workers";
+ public static final Object TOPOLOGY_WORKERS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How many instances to create for a spout/bolt. A task runs on a thread with zero or more
+ * other tasks for the same spout/bolt. The number of tasks for a spout/bolt is always
+ * the same throughout the lifetime of a topology, but the number of executors (threads) for
+ * a spout/bolt can change over time. This allows a topology to scale to more or less resources
+ * without redeploying the topology or violating the constraints of Storm (such as a fields grouping
+ * guaranteeing that the same value goes to the same task).
+ */
+ public static final String TOPOLOGY_TASKS = "topology.tasks";
+ public static final Object TOPOLOGY_TASKS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How many executors to spawn for ackers.
+ *
+ * <p>If this is set to 0, then Storm will immediately ack tuples as soon
+ * as they come off the spout, effectively disabling reliability.</p>
+ */
+ public static final String TOPOLOGY_ACKER_EXECUTORS = "topology.acker.executors";
+ public static final Object TOPOLOGY_ACKER_EXECUTORS_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * The maximum amount of time given to the topology to fully process a message
+ * emitted by a spout. If the message is not acked within this time frame, Storm
+ * will fail the message on the spout. Some spouts implementations will then replay
+ * the message at a later time.
+ */
+ public static final String TOPOLOGY_MESSAGE_TIMEOUT_SECS = "topology.message.timeout.secs";
+ public static final Object TOPOLOGY_MESSAGE_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * A list of serialization registrations for Kryo ( http://code.google.com/p/kryo/ ),
+ * the underlying serialization framework for Storm. A serialization can either
+ * be the name of a class (in which case Kryo will automatically create a serializer for the class
+ * that saves all the object's fields), or an implementation of com.esotericsoftware.kryo.Serializer.
+ *
+ * See Kryo's documentation for more information about writing custom serializers.
+ */
+ public static final String TOPOLOGY_KRYO_REGISTER = "topology.kryo.register";
+ public static final Object TOPOLOGY_KRYO_REGISTER_SCHEMA = ConfigValidation.KryoRegValidator;
+
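    // Illustrative sketch (not part of the diff): registrations are normally added through the
    // registerSerialization helpers this class provides (not shown in this excerpt) rather than
    // by editing topology.kryo.register directly.
    static Config exampleKryoConf() {
        Config conf = new Config();
        conf.registerSerialization(java.util.UUID.class);  // bare class name -> Kryo field serializer
        return conf;
    }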
+ /**
+ * A list of classes that customize storm's kryo instance during start-up.
+ * Each listed class name must implement IKryoDecorator. During start-up the
+ * listed class is instantiated with 0 arguments, then its 'decorate' method
+ * is called with storm's kryo instance as the only argument.
+ */
+ public static final String TOPOLOGY_KRYO_DECORATORS = "topology.kryo.decorators";
+ public static final Object TOPOLOGY_KRYO_DECORATORS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * Class that specifies how to create a Kryo instance for serialization. Storm will then apply
+ * topology.kryo.register and topology.kryo.decorators on top of this. The default implementation
+ * implements topology.fall.back.on.java.serialization and turns references off.
+ */
+ public static final String TOPOLOGY_KRYO_FACTORY = "topology.kryo.factory";
+ public static final Object TOPOLOGY_KRYO_FACTORY_SCHEMA = String.class;
+
+
+ /**
+ * Whether or not Storm should skip the loading of kryo registrations for which it
+ * does not know the class or have the serializer implementation. Otherwise, the task will
+ * fail to load and will throw an error at runtime. The use case of this is if you want to
+ * declare your serializations on the storm.yaml files on the cluster rather than every single
+ * time you submit a topology. Different applications may use different serializations and so
+ * a single application may not have the code for the other serializers used by other apps.
+ * By setting this config to true, Storm will ignore that it doesn't have those other serializations
+ * rather than throw an error.
+ */
+ public static final String TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS= "topology.skip.missing.kryo.registrations";
+ public static final Object TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS_SCHEMA = Boolean.class;
+
+ /**
+ * A list of classes implementing IMetricsConsumer (see storm.yaml.example for the exact config format).
+ * Each listed class will be routed all the metrics data generated by the Storm metrics API.
+ * Each listed class maps 1:1 to a system bolt named __metrics_ClassName#N, and its parallelism is configurable.
+ */
+ public static final String TOPOLOGY_METRICS_CONSUMER_REGISTER = "topology.metrics.consumer.register";
+ public static final Object TOPOLOGY_METRICS_CONSUMER_REGISTER_SCHEMA = ConfigValidation.MapsValidator;
+
+
+ /**
+ * The maximum parallelism allowed for a component in this topology. This configuration is
+ * typically used in testing to limit the number of threads spawned in local mode.
+ */
+ public static final String TOPOLOGY_MAX_TASK_PARALLELISM="topology.max.task.parallelism";
+ public static final Object TOPOLOGY_MAX_TASK_PARALLELISM_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * The maximum number of tuples that can be pending on a spout task at any given time.
+ * This config applies to individual tasks, not to spouts or topologies as a whole.
+ *
+ * A pending tuple is one that has been emitted from a spout but has not been acked or failed yet.
+ * Note that this config parameter has no effect for unreliable spouts that don't tag
+ * their tuples with a message id.
+ */
+ public static final String TOPOLOGY_MAX_SPOUT_PENDING="topology.max.spout.pending";
+ public static final Object TOPOLOGY_MAX_SPOUT_PENDING_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * A class that implements a strategy for what to do when a spout needs to wait. Waiting is
+ * triggered in one of two conditions:
+ *
+ * 1. nextTuple emits no tuples
+ * 2. The spout has hit maxSpoutPending and can't emit any more tuples
+ */
+ public static final String TOPOLOGY_SPOUT_WAIT_STRATEGY="topology.spout.wait.strategy";
+ public static final Object TOPOLOGY_SPOUT_WAIT_STRATEGY_SCHEMA = String.class;
+
+ /**
+ * The amount of milliseconds the SleepEmptyEmitStrategy should sleep for.
+ */
+ public static final String TOPOLOGY_SLEEP_SPOUT_WAIT_STRATEGY_TIME_MS="topology.sleep.spout.wait.strategy.time.ms";
+ public static final Object TOPOLOGY_SLEEP_SPOUT_WAIT_STRATEGY_TIME_MS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The maximum amount of time a component gives a source of state to synchronize before it requests
+ * synchronization again.
+ */
+ public static final String TOPOLOGY_STATE_SYNCHRONIZATION_TIMEOUT_SECS="topology.state.synchronization.timeout.secs";
+ public static final Object TOPOLOGY_STATE_SYNCHRONIZATION_TIMEOUT_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The percentage of tuples to sample to produce stats for a task.
+ */
+ public static final String TOPOLOGY_STATS_SAMPLE_RATE="topology.stats.sample.rate";
+ public static final Object TOPOLOGY_STATS_SAMPLE_RATE_SCHEMA = ConfigValidation.DoubleValidator;
+
+ /**
+ * The time period that built-in metrics data is bucketed into.
+ */
+ public static final String TOPOLOGY_BUILTIN_METRICS_BUCKET_SIZE_SECS="topology.builtin.metrics.bucket.size.secs";
+ public static final Object TOPOLOGY_BUILTIN_METRICS_BUCKET_SIZE_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Whether or not to use Java serialization in a topology.
+ */
+ public static final String TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION="topology.fall.back.on.java.serialization";
+ public static final Object TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION_SCHEMA = Boolean.class;
+
+ /**
+ * Topology-specific options for the worker child process. This is used in addition to WORKER_CHILDOPTS.
+ */
+ public static final String TOPOLOGY_WORKER_CHILDOPTS="topology.worker.childopts";
+ public static final Object TOPOLOGY_WORKER_CHILDOPTS_SCHEMA = ConfigValidation.StringOrStringListValidator;
+
+ /**
+ * Topology-specific GC options for the worker child process. These override WORKER_GC_CHILDOPTS.
+ */
+ public static final String TOPOLOGY_WORKER_GC_CHILDOPTS="topology.worker.gc.childopts";
+ public static final Object TOPOLOGY_WORKER_GC_CHILDOPTS_SCHEMA = ConfigValidation.StringOrStringListValidator;
+
+ /**
+ * Topology-specific classpath for the worker child process. This is combined with the usual classpath.
+ */
+ public static final String TOPOLOGY_CLASSPATH="topology.classpath";
+ public static final Object TOPOLOGY_CLASSPATH_SCHEMA = ConfigValidation.StringOrStringListValidator;
+
+ /**
+ * Topology-specific environment variables for the worker child process.
+ * These are added to the existing environment (that of the supervisor).
+ */
+ public static final String TOPOLOGY_ENVIRONMENT="topology.environment";
+ public static final Object TOPOLOGY_ENVIRONMENT_SCHEMA = Map.class;
+
+ /**
+ * Topology-specific option to enable or disable a bolt's outgoing overflow buffer.
+ * Enabling this option ensures that a bolt can always drain its incoming messages,
+ * preventing live-lock for topologies with cyclic flow.
+ * Note that the overflow buffer can fill up, gradually degrading performance and
+ * eventually running out of memory.
+ */
+ public static final String TOPOLOGY_BOLTS_OUTGOING_OVERFLOW_BUFFER_ENABLE="topology.bolts.outgoing.overflow.buffer.enable";
+ public static final Object TOPOLOGY_BOLTS_OUTGOING_OVERFLOW_BUFFER_ENABLE_SCHEMA = Boolean.class;
+
+ /**
+ * This config is available for TransactionalSpouts, and contains the id (a String) for
+ * the transactional topology. This id is used to store the state of the transactional
+ * topology in Zookeeper.
+ */
+ public static final String TOPOLOGY_TRANSACTIONAL_ID="topology.transactional.id";
+ public static final Object TOPOLOGY_TRANSACTIONAL_ID_SCHEMA = String.class;
+
+ /**
+ * A list of task hooks that are automatically added to every spout and bolt in the topology. An example
+ * of when you'd do this is to add a hook that integrates with your internal
+ * monitoring system. These hooks are instantiated using the zero-arg constructor.
+ */
+ public static final String TOPOLOGY_AUTO_TASK_HOOKS="topology.auto.task.hooks";
+ public static final Object TOPOLOGY_AUTO_TASK_HOOKS_SCHEMA = ConfigValidation.StringsValidator;
+
+
+ /**
+ * The size of the Disruptor receive queue for each executor. Must be a power of 2.
+ */
+ public static final String TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE="topology.executor.receive.buffer.size";
+ public static final Object TOPOLOGY_EXECUTOR_RECEIVE_BUFFER_SIZE_SCHEMA = ConfigValidation.PowerOf2Validator;
+
+ /**
+ * The maximum number of messages to batch from the thread receiving off the network to the
+ * executor queues. Must be a power of 2.
+ */
+ public static final String TOPOLOGY_RECEIVER_BUFFER_SIZE="topology.receiver.buffer.size";
+ public static final Object TOPOLOGY_RECEIVER_BUFFER_SIZE_SCHEMA = ConfigValidation.PowerOf2Validator;
+
+ /**
+ * The size of the Disruptor send queue for each executor. Must be a power of 2.
+ */
+ public static final String TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE="topology.executor.send.buffer.size";
+ public static final Object TOPOLOGY_EXECUTOR_SEND_BUFFER_SIZE_SCHEMA = ConfigValidation.PowerOf2Validator;
+
+ /**
+ * The size of the Disruptor transfer queue for each worker.
+ */
+ public static final String TOPOLOGY_TRANSFER_BUFFER_SIZE="topology.transfer.buffer.size";
+ public static final Object TOPOLOGY_TRANSFER_BUFFER_SIZE_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How often a tick tuple from the "__system" component and "__tick" stream should be sent
+ * to tasks. Meant to be used as a component-specific configuration.
+ */
+ public static final String TOPOLOGY_TICK_TUPLE_FREQ_SECS="topology.tick.tuple.freq.secs";
+ public static final Object TOPOLOGY_TICK_TUPLE_FREQ_SECS_SCHEMA = ConfigValidation.IntegerValidator;
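+
+ /*
+ * Illustrative sketch (hypothetical usage): a component usually requests tick tuples by returning
+ * this setting from getComponentConfiguration(). The bolt class and the 60-second interval below
+ * are examples only.
+ *
+ * public class PeriodicBolt extends BaseRichBolt {
+ *     @Override
+ *     public Map<String, Object> getComponentConfiguration() {
+ *         Map<String, Object> conf = new HashMap<String, Object>();
+ *         conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 60);
+ *         return conf;
+ *     }
+ *     // prepare(), execute() and declareOutputFields() omitted for brevity
+ * }
+ */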
+
+
+ /**
+ * Configure the wait strategy used for internal queuing. Can be used to trade off latency
+ * vs. throughput.
+ */
+ public static final String TOPOLOGY_DISRUPTOR_WAIT_STRATEGY="topology.disruptor.wait.strategy";
+ public static final Object TOPOLOGY_DISRUPTOR_WAIT_STRATEGY_SCHEMA = String.class;
+
+ /**
+ * Configure the wait timeout used for the timeout blocking wait strategy.
+ */
+ public static final String TOPOLOGY_DISRUPTOR_WAIT_TIMEOUT="topology.disruptor.wait.timeout";
+ public static final Object TOPOLOGY_DISRUPTOR_WAIT_TIMEOUT_SCHEMA = Number.class;
+
+ /**
+ * The size of the shared thread pool for worker tasks to make use of. The thread pool can be accessed
+ * via the TopologyContext.
+ */
+ public static final String TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE="topology.worker.shared.thread.pool.size";
+ public static final Object TOPOLOGY_WORKER_SHARED_THREAD_POOL_SIZE_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The interval in seconds to use for determining whether to throttle errors reported to Zookeeper. For example,
+ * an interval of 10 seconds with topology.max.error.report.per.interval set to 5 will only allow 5 errors to be
+ * reported to Zookeeper per task for every 10 second interval of time.
+ */
+ public static final String TOPOLOGY_ERROR_THROTTLE_INTERVAL_SECS="topology.error.throttle.interval.secs";
+ public static final Object TOPOLOGY_ERROR_THROTTLE_INTERVAL_SECS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * See doc for TOPOLOGY_ERROR_THROTTLE_INTERVAL_SECS
+ */
+ public static final String TOPOLOGY_MAX_ERROR_REPORT_PER_INTERVAL="topology.max.error.report.per.interval";
+ public static final Object TOPOLOGY_MAX_ERROR_REPORT_PER_INTERVAL_SCHEMA = ConfigValidation.IntegerValidator;
+
+
+ /**
+ * How often a batch can be emitted in a Trident topology.
+ */
+ public static final String TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS="topology.trident.batch.emit.interval.millis";
+ public static final Object TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * Name of the topology. This config is automatically set by Storm when the topology is submitted.
+ */
+ public final static String TOPOLOGY_NAME="topology.name";
+ public static final Object TOPOLOGY_NAME_SCHEMA = String.class;
+
+ /**
+ * The principal who submitted a topology
+ */
+ public final static String TOPOLOGY_SUBMITTER_PRINCIPAL = "topology.submitter.principal";
+ public static final Object TOPOLOGY_SUBMITTER_PRINCIPAL_SCHEMA = String.class;
+
+ /**
+ * The local user name of the user who submitted a topology.
+ */
+ public static final String TOPOLOGY_SUBMITTER_USER = "topology.submitter.user";
+ public static final Object TOPOLOGY_SUBMITTER_USER_SCHEMA = String.class;
+
+ /**
+ * Array of components that the scheduler should try to place on separate hosts.
+ */
+ public static final String TOPOLOGY_SPREAD_COMPONENTS = "topology.spread.components";
+ public static final Object TOPOLOGY_SPREAD_COMPONENTS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * A list of IAutoCredentials that the topology should load and use.
+ */
+ public static final String TOPOLOGY_AUTO_CREDENTIALS = "topology.auto-credentials";
+ public static final Object TOPOLOGY_AUTO_CREDENTIALS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * Max pending tuples in one ShellBolt
+ */
+ public static final String TOPOLOGY_SHELLBOLT_MAX_PENDING="topology.shellbolt.max.pending";
+ public static final Object TOPOLOGY_SHELLBOLT_MAX_PENDING_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The root directory in ZooKeeper for metadata about TransactionalSpouts.
+ */
+ public static final String TRANSACTIONAL_ZOOKEEPER_ROOT="transactional.zookeeper.root";
+ public static final Object TRANSACTIONAL_ZOOKEEPER_ROOT_SCHEMA = String.class;
+
+ /**
+ * The list of zookeeper servers in which to keep the transactional state. If null (the default),
+ * storm.zookeeper.servers will be used.
+ */
+ public static final String TRANSACTIONAL_ZOOKEEPER_SERVERS="transactional.zookeeper.servers";
+ public static final Object TRANSACTIONAL_ZOOKEEPER_SERVERS_SCHEMA = ConfigValidation.StringsValidator;
+
+ /**
+ * The port to use to connect to the transactional zookeeper servers. If null (the default),
+ * storm.zookeeper.port will be used.
+ */
+ public static final String TRANSACTIONAL_ZOOKEEPER_PORT="transactional.zookeeper.port";
+ public static final Object TRANSACTIONAL_ZOOKEEPER_PORT_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The user on whose behalf the Nimbus client should perform the operation (impersonation).
+ */
+ public static final String STORM_DO_AS_USER="storm.doAsUser";
+ public static final Object STORM_DO_AS_USER_SCHEMA = String.class;
+
+ /**
+ * The number of threads that should be used by the zeromq context in each worker process.
+ */
+ public static final String ZMQ_THREADS = "zmq.threads";
+ public static final Object ZMQ_THREADS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * How long a connection should retry sending messages to a target host when
+ * the connection is closed. This is an advanced configuration and can almost
+ * certainly be ignored.
+ */
+ public static final String ZMQ_LINGER_MILLIS = "zmq.linger.millis";
+ public static final Object ZMQ_LINGER_MILLIS_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * The high water mark for the ZeroMQ push sockets used for networking. Use this config to prevent buffer explosion
+ * on the networking layer.
+ */
+ public static final String ZMQ_HWM = "zmq.hwm";
+ public static final Object ZMQ_HWM_SCHEMA = ConfigValidation.IntegerValidator;
+
+ /**
+ * This value is passed to spawned JVMs (e.g., Nimbus, Supervisor, and Workers)
+ * for the java.library.path value. java.library.path tells the JVM where
+ * to look for native libraries. It is necessary to set this config correctly since
+ * Storm uses the ZeroMQ and JZMQ native libs.
+ */
+ public static final String JAVA_LIBRARY_PATH = "java.library.path";
+ public static final Object JAVA_LIBRARY_PATH_SCHEMA = String.class;
+
+ /**
+ * The path to use as the zookeeper dir when running a zookeeper server via
+ * "storm dev-zookeeper". This zookeeper instance is only intended for development;
+ * it is not a production grade zookeeper setup.
+ */
+ public static final String DEV_ZOOKEEPER_PATH = "dev.zookeeper.path";
+ public static final Object DEV_ZOOKEEPER_PATH_SCHEMA = String.class;
+
+ /**
+ * A map from topology name to the number of machines that should be dedicated to that topology. Set storm.scheduler
+ * to backtype.storm.scheduler.IsolationScheduler to make use of the isolation scheduler.
+ */
+ public static final String ISOLATION_SCHEDULER_MACHINES = "isolation.scheduler.machines";
+ public static final Object ISOLATION_SCHEDULER_MACHINES_SCHEMA = ConfigValidation.MapOfStringToNumberValidator;
+
+ /**
+ * A map from user name to the number of machines that user is allowed to use. Set storm.scheduler
+ * to backtype.storm.scheduler.multitenant.MultitenantScheduler to make use of it.
+ */
+ public static final String MULTITENANT_SCHEDULER_USER_POOLS = "multitenant.scheduler.user.pools";
+ public static final Object MULTITENANT_SCHEDULER_USER_POOLS_SCHEMA = ConfigValidation.MapOfStringToNumberValidator;
+
+ /**
+ * The number of machines that should be used by this topology to isolate it from all others. Set storm.scheduler
+ * to backtype.storm.scheduler.multitenant.MultitenantScheduler to make use of it.
+ */
+ public static final String TOPOLOGY_ISOLATED_MACHINES = "topology.isolate.machines";
+ public static final Object TOPOLOGY_ISOLATED_MACHINES_SCHEMA = Number.class;
+
+ public static void setClasspath(Map conf, String cp) {
+ conf.put(Config.TOPOLOGY_CLASSPATH, cp);
+ }
+
+ public void setClasspath(String cp) {
+ setClasspath(this, cp);
+ }
+
+ public static void setEnvironment(Map conf, Map env) {
+ conf.put(Config.TOPOLOGY_ENVIRONMENT, env);
+ }
+
+ public void setEnvironment(Map env) {
+ setEnvironment(this, env);
+ }
+
+ public static void setDebug(Map conf, boolean isOn) {
+ conf.put(Config.TOPOLOGY_DEBUG, isOn);
+ }
+
+ public void setDebug(boolean isOn) {
+ setDebug(this, isOn);
+ }
+
+ public static void setNumWorkers(Map conf, int workers) {
+ conf.put(Config.TOPOLOGY_WORKERS, workers);
+ }
+
+ public void setNumWorkers(int workers) {
+ setNumWorkers(this, workers);
+ }
+
+ public static void setNumAckers(Map conf, int numExecutors) {
+ conf.put(Config.TOPOLOGY_ACKER_EXECUTORS, numExecutors);
+ }
+
+ public void setNumAckers(int numExecutors) {
+ setNumAckers(this, numExecutors);
+ }
+
+ public static void setMessageTimeoutSecs(Map conf, int secs) {
+ conf.put(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS, secs);
+ }
+
+ public void setMessageTimeoutSecs(int secs) {
+ setMessageTimeoutSecs(this, secs);
+ }
+
+ public static void registerSerialization(Map conf, Class klass) {
+ getRegisteredSerializations(conf).add(klass.getName());
+ }
+
+ public void registerSerialization(Class klass) {
+ registerSerialization(this, klass);
+ }
+
+ public static void registerSerialization(Map conf, Class klass, Class<? extends Serializer> serializerClass) {
+ Map<String, String> register = new HashMap<String, String>();
+ register.put(klass.getName(), serializerClass.getName());
+ getRegisteredSerializations(conf).add(register);
+ }
+
+ public void registerSerialization(Class klass, Class<? extends Serializer> serializerClass) {
+ registerSerialization(this, klass, serializerClass);
+ }
+
+ public static void registerMetricsConsumer(Map conf, Class klass, Object argument, long parallelismHint) {
+ HashMap m = new HashMap();
+ m.put("class", klass.getCanonicalName());
+ m.put("parallelism.hint", parallelismHint);
+ m.put("argument", argument);
+
+ List l = (List)conf.get(TOPOLOGY_METRICS_CONSUMER_REGISTER);
+ if (l == null) { l = new ArrayList(); }
+ l.add(m);
+ conf.put(TOPOLOGY_METRICS_CONSUMER_REGISTER, l);
+ }
+
+ public void registerMetricsConsumer(Class klass, Object argument, long parallelismHint) {
+ registerMetricsConsumer(this, klass, argument, parallelismHint);
+ }
+
+ public static void registerMetricsConsumer(Map conf, Class klass, long parallelismHint) {
+ registerMetricsConsumer(conf, klass, null, parallelismHint);
+ }
+
+ public void registerMetricsConsumer(Class klass, long parallelismHint) {
+ registerMetricsConsumer(this, klass, parallelismHint);
+ }
+
+ public static void registerMetricsConsumer(Map conf, Class klass) {
+ registerMetricsConsumer(conf, klass, null, 1L);
+ }
+
+ public void registerMetricsConsumer(Class klass) {
+ registerMetricsConsumer(this, klass);
+ }
+
+ public static void registerDecorator(Map conf, Class<? extends IKryoDecorator> klass) {
+ getRegisteredDecorators(conf).add(klass.getName());
+ }
+
+ public void registerDecorator(Class<? extends IKryoDecorator> klass) {
+ registerDecorator(this, klass);
+ }
+
+ public static void setKryoFactory(Map conf, Class<? extends IKryoFactory> klass) {
+ conf.put(Config.TOPOLOGY_KRYO_FACTORY, klass.getName());
+ }
+
+ public void setKryoFactory(Class<? extends IKryoFactory> klass) {
+ setKryoFactory(this, klass);
+ }
+
+ public static void setSkipMissingKryoRegistrations(Map conf, boolean skip) {
+ conf.put(Config.TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS, skip);
+ }
+
+ public void setSkipMissingKryoRegistrations(boolean skip) {
+ setSkipMissingKryoRegistrations(this, skip);
+ }
+
+ public static void setMaxTaskParallelism(Map conf, int max) {
+ conf.put(Config.TOPOLOGY_MAX_TASK_PARALLELISM, max);
+ }
+
+ public void setMaxTaskParallelism(int max) {
+ setMaxTaskParallelism(this, max);
+ }
+
+ public static void setMaxSpoutPending(Map conf, int max) {
+ conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING, max);
+ }
+
+ public void setMaxSpoutPending(int max) {
+ setMaxSpoutPending(this, max);
+ }
+
+ public static void setStatsSampleRate(Map conf, double rate) {
+ conf.put(Config.TOPOLOGY_STATS_SAMPLE_RATE, rate);
+ }
+
+ public void setStatsSampleRate(double rate) {
+ setStatsSampleRate(this, rate);
+ }
+
+ public static void setFallBackOnJavaSerialization(Map conf, boolean fallback) {
+ conf.put(Config.TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION, fallback);
+ }
+
+ public void setFallBackOnJavaSerialization(boolean fallback) {
+ setFallBackOnJavaSerialization(this, fallback);
+ }
+
+ private static List getRegisteredSerializations(Map conf) {
+ List ret;
+ if(!conf.containsKey(Config.TOPOLOGY_KRYO_REGISTER)) {
+ ret = new ArrayList();
+ } else {
+ ret = new ArrayList((List) conf.get(Config.TOPOLOGY_KRYO_REGISTER));
+ }
+ conf.put(Config.TOPOLOGY_KRYO_REGISTER, ret);
+ return ret;
+ }
+
+ private static List getRegisteredDecorators(Map conf) {
+ List ret;
+ if(!conf.containsKey(Config.TOPOLOGY_KRYO_DECORATORS)) {
+ ret = new ArrayList();
+ } else {
+ ret = new ArrayList((List) conf.get(Config.TOPOLOGY_KRYO_DECORATORS));
+ }
+ conf.put(Config.TOPOLOGY_KRYO_DECORATORS, ret);
+ return ret;
+ }
+}
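A minimal usage sketch of the Config helper methods defined above, assuming (as in mainline Storm) that Config extends HashMap<String, Object>; the specific settings and values shown are arbitrary examples:

    import java.util.Map;

    import backtype.storm.Config;

    public class ConfigUsageExample {
        public static void main(String[] args) {
            Config conf = new Config();
            conf.setNumWorkers(2);              // topology.workers
            conf.setMaxSpoutPending(1000);      // topology.max.spout.pending
            conf.setMessageTimeoutSecs(30);     // topology.message.timeout.secs
            conf.setMaxTaskParallelism(8);      // topology.max.task.parallelism
            conf.setDebug(false);               // topology.debug

            // Config is just a Map of config keys to values, so it can be inspected directly.
            for (Map.Entry<String, Object> entry : conf.entrySet()) {
                System.out.println(entry.getKey() + " = " + entry.getValue());
            }
        }
    }
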
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/ConfigValidation.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/ConfigValidation.java b/jstorm-core/src/main/java/backtype/storm/ConfigValidation.java
new file mode 100755
index 0000000..24991d7
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/ConfigValidation.java
@@ -0,0 +1,351 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import java.util.Map;
+
+/**
+ * Provides functionality for validating configuration fields.
+ */
+public class ConfigValidation {
+
+ /**
+ * Declares methods for validating configuration values.
+ */
+ public static interface FieldValidator {
+ /**
+ * Validates the given field.
+ * @param name the name of the field.
+ * @param field The field to be validated.
+ * @throws IllegalArgumentException if the field fails validation.
+ */
+ public void validateField(String name, Object field) throws IllegalArgumentException;
+ }
+
+ /**
+ * Declares a method for validating configuration values that is nestable.
+ */
+ public static abstract class NestableFieldValidator implements FieldValidator {
+ @Override
+ public void validateField(String name, Object field) throws IllegalArgumentException {
+ validateField(null, name, field);
+ }
+
+ /**
+ * Validates the given field.
+ * @param pd describes the parent wrapping this validator.
+ * @param name the name of the field.
+ * @param field The field to be validated.
+ * @throws IllegalArgumentException if the field fails validation.
+ */
+ public abstract void validateField(String pd, String name, Object field) throws IllegalArgumentException;
+ }
+
+ /**
+ * Returns a new NestableFieldValidator for a given class.
+ * @param cls the Class the field should be a type of
+ * @param nullAllowed whether or not a value of null is valid
+ * @return a NestableFieldValidator for that class
+ */
+ public static NestableFieldValidator fv(final Class cls, final boolean nullAllowed) {
+ return new NestableFieldValidator() {
+ @Override
+ public void validateField(String pd, String name, Object field)
+ throws IllegalArgumentException {
+ if (nullAllowed && field == null) {
+ return;
+ }
+ if (! cls.isInstance(field)) {
+ throw new IllegalArgumentException(
+ pd + name + " must be a " + cls.getName() + ". ("+field+")");
+ }
+ }
+ };
+ }
+
+ /**
+ * Returns a new NestableFieldValidator for a List of the given Class.
+ * @param cls the Class of elements composing the list
+ * @param nullAllowed whether or not a value of null is valid
+ * @return a NestableFieldValidator for a list of the given class
+ */
+ public static NestableFieldValidator listFv(Class cls, boolean nullAllowed) {
+ return listFv(fv(cls, false), nullAllowed);
+ }
+
+ /**
+ * Returns a new NestableFieldValidator for a List where each item is validated by validator.
+ * @param validator used to validate each item in the list
+ * @param nullAllowed whether or not a value of null is valid
+ * @return a NestableFieldValidator for a list with each item validated by the given validator.
+ */
+ public static NestableFieldValidator listFv(final NestableFieldValidator validator,
+ final boolean nullAllowed) {
+ return new NestableFieldValidator() {
+ @Override
+ public void validateField(String pd, String name, Object field)
+ throws IllegalArgumentException {
+ if (nullAllowed && field == null) {
+ return;
+ }
+ if (field instanceof Iterable) {
+ for (Object e : (Iterable)field) {
+ validator.validateField(pd + "Each element of the list ", name, e);
+ }
+ return;
+ }
+ throw new IllegalArgumentException(
+ "Field " + name + " must be an Iterable but was " +
+ ((field == null) ? "null" : ("a " + field.getClass())));
+ }
+ };
+ }
+
+ /**
+ * Returns a new NestableFieldValidator for a Map of key to val.
+ * @param key the Class of keys in the map
+ * @param val the Class of values in the map
+ * @param nullAllowed whether or not a value of null is valid
+ * @return a NestableFieldValidator for a Map of key to val
+ */
+ public static NestableFieldValidator mapFv(Class key, Class val,
+ boolean nullAllowed) {
+ return mapFv(fv(key, false), fv(val, false), nullAllowed);
+ }
+
+ /**
+ * Returns a new NestableFieldValidator for a Map.
+ * @param key a validator for the keys in the map
+ * @param val a validator for the values in the map
+ * @param nullAllowed whether or not a value of null is valid
+ * @return a NestableFieldValidator for a Map
+ */
+ public static NestableFieldValidator mapFv(final NestableFieldValidator key,
+ final NestableFieldValidator val, final boolean nullAllowed) {
+ return new NestableFieldValidator() {
+ @SuppressWarnings("unchecked")
+ @Override
+ public void validateField(String pd, String name, Object field)
+ throws IllegalArgumentException {
+ if (nullAllowed && field == null) {
+ return;
+ }
+ if (field instanceof Map) {
+ for (Map.Entry<Object, Object> entry: ((Map<Object, Object>)field).entrySet()) {
+ key.validateField("Each key of the map ", name, entry.getKey());
+ val.validateField("Each value in the map ", name, entry.getValue());
+ }
+ return;
+ }
+ throw new IllegalArgumentException(
+ "Field " + name + " must be a Map");
+ }
+ };
+ }
+
+ /**
+ * Validates a list of Numbers.
+ */
+ public static Object NumbersValidator = listFv(Number.class, true);
+
+ /**
+ * Validates a list of Strings.
+ */
+ public static Object StringsValidator = listFv(String.class, true);
+
+ /**
+ * Validates a map of Strings to Numbers.
+ */
+ public static Object MapOfStringToNumberValidator = mapFv(String.class, Number.class, true);
+
+ /**
+ * Validates a map of Strings to a map of Strings to a list of Strings:
+ * {str -> {str -> [str, str]}}
+ */
+ public static Object MapOfStringToMapValidator = mapFv(fv(String.class, false), mapFv(fv(String.class, false), listFv(String.class, false), false), true);
+
+ /**
+ * Validates a list of Maps.
+ */
+ public static Object MapsValidator = listFv(Map.class, true);
+
+ /**
+ * Validates an Integer.
+ */
+ public static Object IntegerValidator = new FieldValidator() {
+ @Override
+ public void validateField(String name, Object o) throws IllegalArgumentException {
+ if (o == null) {
+ // A null value is acceptable.
+ return;
+ }
+ final long i;
+ if (o instanceof Number &&
+ (i = ((Number)o).longValue()) == ((Number)o).doubleValue()) {
+ if (i <= Integer.MAX_VALUE && i >= Integer.MIN_VALUE) {
+ return;
+ }
+ }
+
+ throw new IllegalArgumentException("Field " + name + " must be an Integer within type range.");
+ }
+ };
+
+ /**
+ * Validates a list of Integers.
+ */
+ public static Object IntegersValidator = new FieldValidator() {
+ @Override
+ public void validateField(String name, Object field)
+ throws IllegalArgumentException {
+ if (field == null) {
+ // A null value is acceptable.
+ return;
+ }
+ if (field instanceof Iterable) {
+ for (Object o : (Iterable)field) {
+ final long i;
+ if (o instanceof Number &&
+ ((i = ((Number)o).longValue()) == ((Number)o).doubleValue()) &&
+ (i <= Integer.MAX_VALUE && i >= Integer.MIN_VALUE)) {
+ // pass the test
+ } else {
+ throw new IllegalArgumentException(
+ "Each element of the list " + name + " must be an Integer within type range.");
+ }
+ }
+ return;
+ }
+ // Values that are neither null nor an Iterable are rejected, mirroring the other list validators.
+ throw new IllegalArgumentException(
+ "Field " + name + " must be an Iterable of Integers within type range.");
+ }
+ };
+
+ /**
+ * Validates a Double.
+ */
+ public static Object DoubleValidator = new FieldValidator() {
+ @Override
+ public void validateField(String name, Object o) throws IllegalArgumentException {
+ if (o == null) {
+ // A null value is acceptable.
+ return;
+ }
+
+ // leniently accept any Number here; converting an int/long to double may lose some precision
+ if (o instanceof Number) {
+ return;
+ }
+
+ throw new IllegalArgumentException("Field " + name + " must be a Double.");
+ }
+ };
+
+ /**
+ * Validates a power of 2.
+ */
+ public static Object PowerOf2Validator = new FieldValidator() {
+ @Override
+ public void validateField(String name, Object o) throws IllegalArgumentException {
+ if (o == null) {
+ // A null value is acceptable.
+ return;
+ }
+ final long i;
+ if (o instanceof Number &&
+ (i = ((Number)o).longValue()) == ((Number)o).doubleValue())
+ {
+ // Test whether the integer is a power of 2.
+ if (i > 0 && (i & (i-1)) == 0) {
+ return;
+ }
+ }
+ throw new IllegalArgumentException("Field " + name + " must be a power of 2.");
+ }
+ };
+
+ /**
+ * Validates a positive integer.
+ */
+ public static Object PositiveIntegerValidator = new FieldValidator() {
+ @Override
+ public void validateField(String name, Object o) throws IllegalArgumentException {
+ if (o == null) {
+ // A null value is acceptable.
+ return;
+ }
+ final long i;
+ if (o instanceof Number &&
+ (i = ((Number)o).longValue()) == ((Number)o).doubleValue())
+ {
+ if (i > 0) {
+ return;
+ }
+ }
+ throw new IllegalArgumentException("Field " + name + " must be a positive integer.");
+ }
+ };
+
+ /**
+ * Validates Kryo Registration
+ */
+ public static Object KryoRegValidator = new FieldValidator() {
+ @Override
+ public void validateField(String name, Object o) throws IllegalArgumentException {
+ if (o == null) {
+ // A null value is acceptable.
+ return;
+ }
+ if (o instanceof Iterable) {
+ for (Object e : (Iterable)o) {
+ if (e instanceof Map) {
+ for (Map.Entry<Object,Object> entry: ((Map<Object,Object>)e).entrySet()) {
+ if (!(entry.getKey() instanceof String) ||
+ !(entry.getValue() instanceof String)) {
+ throw new IllegalArgumentException(
+ "Each element of the list " + name + " must be a String or a Map of Strings");
+ }
+ }
+ } else if (!(e instanceof String)) {
+ throw new IllegalArgumentException(
+ "Each element of the list " + name + " must be a String or a Map of Strings");
+ }
+ }
+ return;
+ }
+ throw new IllegalArgumentException(
+ "Field " + name + " must be an Iterable containing only Strings or Maps of Strings");
+ }
+ };
+
+ /**
+ * Validates a String or a list of Strings
+ */
+ public static Object StringOrStringListValidator = new FieldValidator() {
+
+ private FieldValidator fv = listFv(String.class, false);
+
+ @Override
+ public void validateField(String name, Object o) throws IllegalArgumentException {
+ if (o == null || o instanceof String) {
+ // A null value or a String value is acceptable
+ return;
+ }
+ this.fv.validateField(name, o);
+ }
+ };
+}
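A short sketch of how these validators behave, assuming only the API shown in the diff above: each validator object implements FieldValidator, and validateField throws IllegalArgumentException when a value does not match. The config keys and values below are hypothetical examples.

    import java.util.HashMap;
    import java.util.Map;

    import backtype.storm.ConfigValidation;

    public class ConfigValidationExample {
        public static void main(String[] args) {
            ConfigValidation.FieldValidator powerOf2 =
                    (ConfigValidation.FieldValidator) ConfigValidation.PowerOf2Validator;
            powerOf2.validateField("topology.executor.receive.buffer.size", 1024); // passes
            try {
                powerOf2.validateField("topology.executor.receive.buffer.size", 1000);
            } catch (IllegalArgumentException expected) {
                System.out.println(expected.getMessage()); // "... must be a power of 2."
            }

            // Nested validators compose: a Map of String -> Number, with null allowed.
            ConfigValidation.NestableFieldValidator userPools =
                    ConfigValidation.mapFv(String.class, Number.class, true);
            Map<String, Number> pools = new HashMap<String, Number>();
            pools.put("alice", 4);
            userPools.validateField("multitenant.scheduler.user.pools", pools); // passes
        }
    }
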
[36/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/Nimbus.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/Nimbus.java b/jstorm-client/src/main/java/backtype/storm/generated/Nimbus.java
deleted file mode 100644
index 6f63240..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/Nimbus.java
+++ /dev/null
@@ -1,18351 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Nimbus {
-
- public interface Iface {
-
- public void submitTopology(String name, String uploadedJarLocation, String jsonConf, StormTopology topology) throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException;
-
- public void submitTopologyWithOpts(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options) throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException;
-
- public void killTopology(String name) throws NotAliveException, org.apache.thrift7.TException;
-
- public void killTopologyWithOpts(String name, KillOptions options) throws NotAliveException, org.apache.thrift7.TException;
-
- public void activate(String name) throws NotAliveException, org.apache.thrift7.TException;
-
- public void deactivate(String name) throws NotAliveException, org.apache.thrift7.TException;
-
- public void rebalance(String name, RebalanceOptions options) throws NotAliveException, InvalidTopologyException, org.apache.thrift7.TException;
-
- public void metricMonitor(String name, MonitorOptions options) throws NotAliveException, org.apache.thrift7.TException;
-
- public void restart(String name, String jsonConf) throws NotAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException;
-
- public void beginLibUpload(String libName) throws org.apache.thrift7.TException;
-
- public String beginFileUpload() throws org.apache.thrift7.TException;
-
- public void uploadChunk(String location, ByteBuffer chunk) throws org.apache.thrift7.TException;
-
- public void finishFileUpload(String location) throws org.apache.thrift7.TException;
-
- public String beginFileDownload(String file) throws org.apache.thrift7.TException;
-
- public ByteBuffer downloadChunk(String id) throws org.apache.thrift7.TException;
-
- public String getNimbusConf() throws org.apache.thrift7.TException;
-
- public ClusterSummary getClusterInfo() throws org.apache.thrift7.TException;
-
- public TopologyInfo getTopologyInfo(String id) throws NotAliveException, org.apache.thrift7.TException;
-
- public TopologyInfo getTopologyInfoByName(String topologyName) throws NotAliveException, org.apache.thrift7.TException;
-
- public SupervisorWorkers getSupervisorWorkers(String host) throws NotAliveException, org.apache.thrift7.TException;
-
- public String getTopologyConf(String id) throws NotAliveException, org.apache.thrift7.TException;
-
- public StormTopology getTopology(String id) throws NotAliveException, org.apache.thrift7.TException;
-
- public StormTopology getUserTopology(String id) throws NotAliveException, org.apache.thrift7.TException;
-
- public TopologyMetricInfo getTopologyMetric(String id) throws NotAliveException, org.apache.thrift7.TException;
-
- }
-
- public interface AsyncIface {
-
- public void submitTopology(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.submitTopology_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void submitTopologyWithOpts(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.submitTopologyWithOpts_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void killTopology(String name, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.killTopology_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void killTopologyWithOpts(String name, KillOptions options, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.killTopologyWithOpts_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void activate(String name, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.activate_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void deactivate(String name, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.deactivate_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void rebalance(String name, RebalanceOptions options, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.rebalance_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void metricMonitor(String name, MonitorOptions options, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.metricMonitor_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void restart(String name, String jsonConf, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.restart_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void beginLibUpload(String libName, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.beginLibUpload_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void beginFileUpload(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.beginFileUpload_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void uploadChunk(String location, ByteBuffer chunk, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.uploadChunk_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void finishFileUpload(String location, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.finishFileUpload_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void beginFileDownload(String file, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.beginFileDownload_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void downloadChunk(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.downloadChunk_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getNimbusConf(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getNimbusConf_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getClusterInfo(org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getClusterInfo_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getTopologyInfo(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getTopologyInfo_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getTopologyInfoByName(String topologyName, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getTopologyInfoByName_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getSupervisorWorkers(String host, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getSupervisorWorkers_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getTopologyConf(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getTopologyConf_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getTopology(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getTopology_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getUserTopology(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getUserTopology_call> resultHandler) throws org.apache.thrift7.TException;
-
- public void getTopologyMetric(String id, org.apache.thrift7.async.AsyncMethodCallback<AsyncClient.getTopologyMetric_call> resultHandler) throws org.apache.thrift7.TException;
-
- }
-
- public static class Client extends org.apache.thrift7.TServiceClient implements Iface {
- public static class Factory implements org.apache.thrift7.TServiceClientFactory<Client> {
- public Factory() {}
- public Client getClient(org.apache.thrift7.protocol.TProtocol prot) {
- return new Client(prot);
- }
- public Client getClient(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- return new Client(iprot, oprot);
- }
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol prot)
- {
- super(prot, prot);
- }
-
- public Client(org.apache.thrift7.protocol.TProtocol iprot, org.apache.thrift7.protocol.TProtocol oprot) {
- super(iprot, oprot);
- }
-
- public void submitTopology(String name, String uploadedJarLocation, String jsonConf, StormTopology topology) throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException
- {
- send_submitTopology(name, uploadedJarLocation, jsonConf, topology);
- recv_submitTopology();
- }
-
- public void send_submitTopology(String name, String uploadedJarLocation, String jsonConf, StormTopology topology) throws org.apache.thrift7.TException
- {
- submitTopology_args args = new submitTopology_args();
- args.set_name(name);
- args.set_uploadedJarLocation(uploadedJarLocation);
- args.set_jsonConf(jsonConf);
- args.set_topology(topology);
- sendBase("submitTopology", args);
- }
-
- public void recv_submitTopology() throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException
- {
- submitTopology_result result = new submitTopology_result();
- receiveBase(result, "submitTopology");
- if (result.e != null) {
- throw result.e;
- }
- if (result.ite != null) {
- throw result.ite;
- }
- if (result.tae != null) {
- throw result.tae;
- }
- return;
- }
-
- public void submitTopologyWithOpts(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options) throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException
- {
- send_submitTopologyWithOpts(name, uploadedJarLocation, jsonConf, topology, options);
- recv_submitTopologyWithOpts();
- }
-
- public void send_submitTopologyWithOpts(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options) throws org.apache.thrift7.TException
- {
- submitTopologyWithOpts_args args = new submitTopologyWithOpts_args();
- args.set_name(name);
- args.set_uploadedJarLocation(uploadedJarLocation);
- args.set_jsonConf(jsonConf);
- args.set_topology(topology);
- args.set_options(options);
- sendBase("submitTopologyWithOpts", args);
- }
-
- public void recv_submitTopologyWithOpts() throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException
- {
- submitTopologyWithOpts_result result = new submitTopologyWithOpts_result();
- receiveBase(result, "submitTopologyWithOpts");
- if (result.e != null) {
- throw result.e;
- }
- if (result.ite != null) {
- throw result.ite;
- }
- if (result.tae != null) {
- throw result.tae;
- }
- return;
- }
-
- public void killTopology(String name) throws NotAliveException, org.apache.thrift7.TException
- {
- send_killTopology(name);
- recv_killTopology();
- }
-
- public void send_killTopology(String name) throws org.apache.thrift7.TException
- {
- killTopology_args args = new killTopology_args();
- args.set_name(name);
- sendBase("killTopology", args);
- }
-
- public void recv_killTopology() throws NotAliveException, org.apache.thrift7.TException
- {
- killTopology_result result = new killTopology_result();
- receiveBase(result, "killTopology");
- if (result.e != null) {
- throw result.e;
- }
- return;
- }
-
- public void killTopologyWithOpts(String name, KillOptions options) throws NotAliveException, org.apache.thrift7.TException
- {
- send_killTopologyWithOpts(name, options);
- recv_killTopologyWithOpts();
- }
-
- public void send_killTopologyWithOpts(String name, KillOptions options) throws org.apache.thrift7.TException
- {
- killTopologyWithOpts_args args = new killTopologyWithOpts_args();
- args.set_name(name);
- args.set_options(options);
- sendBase("killTopologyWithOpts", args);
- }
-
- public void recv_killTopologyWithOpts() throws NotAliveException, org.apache.thrift7.TException
- {
- killTopologyWithOpts_result result = new killTopologyWithOpts_result();
- receiveBase(result, "killTopologyWithOpts");
- if (result.e != null) {
- throw result.e;
- }
- return;
- }
-
- public void activate(String name) throws NotAliveException, org.apache.thrift7.TException
- {
- send_activate(name);
- recv_activate();
- }
-
- public void send_activate(String name) throws org.apache.thrift7.TException
- {
- activate_args args = new activate_args();
- args.set_name(name);
- sendBase("activate", args);
- }
-
- public void recv_activate() throws NotAliveException, org.apache.thrift7.TException
- {
- activate_result result = new activate_result();
- receiveBase(result, "activate");
- if (result.e != null) {
- throw result.e;
- }
- return;
- }
-
- public void deactivate(String name) throws NotAliveException, org.apache.thrift7.TException
- {
- send_deactivate(name);
- recv_deactivate();
- }
-
- public void send_deactivate(String name) throws org.apache.thrift7.TException
- {
- deactivate_args args = new deactivate_args();
- args.set_name(name);
- sendBase("deactivate", args);
- }
-
- public void recv_deactivate() throws NotAliveException, org.apache.thrift7.TException
- {
- deactivate_result result = new deactivate_result();
- receiveBase(result, "deactivate");
- if (result.e != null) {
- throw result.e;
- }
- return;
- }
-
- public void rebalance(String name, RebalanceOptions options) throws NotAliveException, InvalidTopologyException, org.apache.thrift7.TException
- {
- send_rebalance(name, options);
- recv_rebalance();
- }
-
- public void send_rebalance(String name, RebalanceOptions options) throws org.apache.thrift7.TException
- {
- rebalance_args args = new rebalance_args();
- args.set_name(name);
- args.set_options(options);
- sendBase("rebalance", args);
- }
-
- public void recv_rebalance() throws NotAliveException, InvalidTopologyException, org.apache.thrift7.TException
- {
- rebalance_result result = new rebalance_result();
- receiveBase(result, "rebalance");
- if (result.e != null) {
- throw result.e;
- }
- if (result.ite != null) {
- throw result.ite;
- }
- return;
- }
-
- public void metricMonitor(String name, MonitorOptions options) throws NotAliveException, org.apache.thrift7.TException
- {
- send_metricMonitor(name, options);
- recv_metricMonitor();
- }
-
- public void send_metricMonitor(String name, MonitorOptions options) throws org.apache.thrift7.TException
- {
- metricMonitor_args args = new metricMonitor_args();
- args.set_name(name);
- args.set_options(options);
- sendBase("metricMonitor", args);
- }
-
- public void recv_metricMonitor() throws NotAliveException, org.apache.thrift7.TException
- {
- metricMonitor_result result = new metricMonitor_result();
- receiveBase(result, "metricMonitor");
- if (result.e != null) {
- throw result.e;
- }
- return;
- }
-
- public void restart(String name, String jsonConf) throws NotAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException
- {
- send_restart(name, jsonConf);
- recv_restart();
- }
-
- public void send_restart(String name, String jsonConf) throws org.apache.thrift7.TException
- {
- restart_args args = new restart_args();
- args.set_name(name);
- args.set_jsonConf(jsonConf);
- sendBase("restart", args);
- }
-
- public void recv_restart() throws NotAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException
- {
- restart_result result = new restart_result();
- receiveBase(result, "restart");
- if (result.e != null) {
- throw result.e;
- }
- if (result.ite != null) {
- throw result.ite;
- }
- if (result.tae != null) {
- throw result.tae;
- }
- return;
- }
-
- public void beginLibUpload(String libName) throws org.apache.thrift7.TException
- {
- send_beginLibUpload(libName);
- recv_beginLibUpload();
- }
-
- public void send_beginLibUpload(String libName) throws org.apache.thrift7.TException
- {
- beginLibUpload_args args = new beginLibUpload_args();
- args.set_libName(libName);
- sendBase("beginLibUpload", args);
- }
-
- public void recv_beginLibUpload() throws org.apache.thrift7.TException
- {
- beginLibUpload_result result = new beginLibUpload_result();
- receiveBase(result, "beginLibUpload");
- return;
- }
-
- public String beginFileUpload() throws org.apache.thrift7.TException
- {
- send_beginFileUpload();
- return recv_beginFileUpload();
- }
-
- public void send_beginFileUpload() throws org.apache.thrift7.TException
- {
- beginFileUpload_args args = new beginFileUpload_args();
- sendBase("beginFileUpload", args);
- }
-
- public String recv_beginFileUpload() throws org.apache.thrift7.TException
- {
- beginFileUpload_result result = new beginFileUpload_result();
- receiveBase(result, "beginFileUpload");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "beginFileUpload failed: unknown result");
- }
-
- public void uploadChunk(String location, ByteBuffer chunk) throws org.apache.thrift7.TException
- {
- send_uploadChunk(location, chunk);
- recv_uploadChunk();
- }
-
- public void send_uploadChunk(String location, ByteBuffer chunk) throws org.apache.thrift7.TException
- {
- uploadChunk_args args = new uploadChunk_args();
- args.set_location(location);
- args.set_chunk(chunk);
- sendBase("uploadChunk", args);
- }
-
- public void recv_uploadChunk() throws org.apache.thrift7.TException
- {
- uploadChunk_result result = new uploadChunk_result();
- receiveBase(result, "uploadChunk");
- return;
- }
-
- public void finishFileUpload(String location) throws org.apache.thrift7.TException
- {
- send_finishFileUpload(location);
- recv_finishFileUpload();
- }
-
- public void send_finishFileUpload(String location) throws org.apache.thrift7.TException
- {
- finishFileUpload_args args = new finishFileUpload_args();
- args.set_location(location);
- sendBase("finishFileUpload", args);
- }
-
- public void recv_finishFileUpload() throws org.apache.thrift7.TException
- {
- finishFileUpload_result result = new finishFileUpload_result();
- receiveBase(result, "finishFileUpload");
- return;
- }
-
- public String beginFileDownload(String file) throws org.apache.thrift7.TException
- {
- send_beginFileDownload(file);
- return recv_beginFileDownload();
- }
-
- public void send_beginFileDownload(String file) throws org.apache.thrift7.TException
- {
- beginFileDownload_args args = new beginFileDownload_args();
- args.set_file(file);
- sendBase("beginFileDownload", args);
- }
-
- public String recv_beginFileDownload() throws org.apache.thrift7.TException
- {
- beginFileDownload_result result = new beginFileDownload_result();
- receiveBase(result, "beginFileDownload");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "beginFileDownload failed: unknown result");
- }
-
- public ByteBuffer downloadChunk(String id) throws org.apache.thrift7.TException
- {
- send_downloadChunk(id);
- return recv_downloadChunk();
- }
-
- public void send_downloadChunk(String id) throws org.apache.thrift7.TException
- {
- downloadChunk_args args = new downloadChunk_args();
- args.set_id(id);
- sendBase("downloadChunk", args);
- }
-
- public ByteBuffer recv_downloadChunk() throws org.apache.thrift7.TException
- {
- downloadChunk_result result = new downloadChunk_result();
- receiveBase(result, "downloadChunk");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "downloadChunk failed: unknown result");
- }
-
- public String getNimbusConf() throws org.apache.thrift7.TException
- {
- send_getNimbusConf();
- return recv_getNimbusConf();
- }
-
- public void send_getNimbusConf() throws org.apache.thrift7.TException
- {
- getNimbusConf_args args = new getNimbusConf_args();
- sendBase("getNimbusConf", args);
- }
-
- public String recv_getNimbusConf() throws org.apache.thrift7.TException
- {
- getNimbusConf_result result = new getNimbusConf_result();
- receiveBase(result, "getNimbusConf");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getNimbusConf failed: unknown result");
- }
-
- public ClusterSummary getClusterInfo() throws org.apache.thrift7.TException
- {
- send_getClusterInfo();
- return recv_getClusterInfo();
- }
-
- public void send_getClusterInfo() throws org.apache.thrift7.TException
- {
- getClusterInfo_args args = new getClusterInfo_args();
- sendBase("getClusterInfo", args);
- }
-
- public ClusterSummary recv_getClusterInfo() throws org.apache.thrift7.TException
- {
- getClusterInfo_result result = new getClusterInfo_result();
- receiveBase(result, "getClusterInfo");
- if (result.is_set_success()) {
- return result.success;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getClusterInfo failed: unknown result");
- }
-
- public TopologyInfo getTopologyInfo(String id) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getTopologyInfo(id);
- return recv_getTopologyInfo();
- }
-
- public void send_getTopologyInfo(String id) throws org.apache.thrift7.TException
- {
- getTopologyInfo_args args = new getTopologyInfo_args();
- args.set_id(id);
- sendBase("getTopologyInfo", args);
- }
-
- public TopologyInfo recv_getTopologyInfo() throws NotAliveException, org.apache.thrift7.TException
- {
- getTopologyInfo_result result = new getTopologyInfo_result();
- receiveBase(result, "getTopologyInfo");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getTopologyInfo failed: unknown result");
- }
-
- public TopologyInfo getTopologyInfoByName(String topologyName) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getTopologyInfoByName(topologyName);
- return recv_getTopologyInfoByName();
- }
-
- public void send_getTopologyInfoByName(String topologyName) throws org.apache.thrift7.TException
- {
- getTopologyInfoByName_args args = new getTopologyInfoByName_args();
- args.set_topologyName(topologyName);
- sendBase("getTopologyInfoByName", args);
- }
-
- public TopologyInfo recv_getTopologyInfoByName() throws NotAliveException, org.apache.thrift7.TException
- {
- getTopologyInfoByName_result result = new getTopologyInfoByName_result();
- receiveBase(result, "getTopologyInfoByName");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getTopologyInfoByName failed: unknown result");
- }
-
- public SupervisorWorkers getSupervisorWorkers(String host) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getSupervisorWorkers(host);
- return recv_getSupervisorWorkers();
- }
-
- public void send_getSupervisorWorkers(String host) throws org.apache.thrift7.TException
- {
- getSupervisorWorkers_args args = new getSupervisorWorkers_args();
- args.set_host(host);
- sendBase("getSupervisorWorkers", args);
- }
-
- public SupervisorWorkers recv_getSupervisorWorkers() throws NotAliveException, org.apache.thrift7.TException
- {
- getSupervisorWorkers_result result = new getSupervisorWorkers_result();
- receiveBase(result, "getSupervisorWorkers");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getSupervisorWorkers failed: unknown result");
- }
-
- public String getTopologyConf(String id) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getTopologyConf(id);
- return recv_getTopologyConf();
- }
-
- public void send_getTopologyConf(String id) throws org.apache.thrift7.TException
- {
- getTopologyConf_args args = new getTopologyConf_args();
- args.set_id(id);
- sendBase("getTopologyConf", args);
- }
-
- public String recv_getTopologyConf() throws NotAliveException, org.apache.thrift7.TException
- {
- getTopologyConf_result result = new getTopologyConf_result();
- receiveBase(result, "getTopologyConf");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getTopologyConf failed: unknown result");
- }
-
- public StormTopology getTopology(String id) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getTopology(id);
- return recv_getTopology();
- }
-
- public void send_getTopology(String id) throws org.apache.thrift7.TException
- {
- getTopology_args args = new getTopology_args();
- args.set_id(id);
- sendBase("getTopology", args);
- }
-
- public StormTopology recv_getTopology() throws NotAliveException, org.apache.thrift7.TException
- {
- getTopology_result result = new getTopology_result();
- receiveBase(result, "getTopology");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getTopology failed: unknown result");
- }
-
- public StormTopology getUserTopology(String id) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getUserTopology(id);
- return recv_getUserTopology();
- }
-
- public void send_getUserTopology(String id) throws org.apache.thrift7.TException
- {
- getUserTopology_args args = new getUserTopology_args();
- args.set_id(id);
- sendBase("getUserTopology", args);
- }
-
- public StormTopology recv_getUserTopology() throws NotAliveException, org.apache.thrift7.TException
- {
- getUserTopology_result result = new getUserTopology_result();
- receiveBase(result, "getUserTopology");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getUserTopology failed: unknown result");
- }
-
- public TopologyMetricInfo getTopologyMetric(String id) throws NotAliveException, org.apache.thrift7.TException
- {
- send_getTopologyMetric(id);
- return recv_getTopologyMetric();
- }
-
- public void send_getTopologyMetric(String id) throws org.apache.thrift7.TException
- {
- getTopologyMetric_args args = new getTopologyMetric_args();
- args.set_id(id);
- sendBase("getTopologyMetric", args);
- }
-
- public TopologyMetricInfo recv_getTopologyMetric() throws NotAliveException, org.apache.thrift7.TException
- {
- getTopologyMetric_result result = new getTopologyMetric_result();
- receiveBase(result, "getTopologyMetric");
- if (result.is_set_success()) {
- return result.success;
- }
- if (result.e != null) {
- throw result.e;
- }
- throw new org.apache.thrift7.TApplicationException(org.apache.thrift7.TApplicationException.MISSING_RESULT, "getTopologyMetric failed: unknown result");
- }
-
- }
- public static class AsyncClient extends org.apache.thrift7.async.TAsyncClient implements AsyncIface {
- public static class Factory implements org.apache.thrift7.async.TAsyncClientFactory<AsyncClient> {
- private org.apache.thrift7.async.TAsyncClientManager clientManager;
- private org.apache.thrift7.protocol.TProtocolFactory protocolFactory;
- public Factory(org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.protocol.TProtocolFactory protocolFactory) {
- this.clientManager = clientManager;
- this.protocolFactory = protocolFactory;
- }
- public AsyncClient getAsyncClient(org.apache.thrift7.transport.TNonblockingTransport transport) {
- return new AsyncClient(protocolFactory, clientManager, transport);
- }
- }
-
- public AsyncClient(org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.async.TAsyncClientManager clientManager, org.apache.thrift7.transport.TNonblockingTransport transport) {
- super(protocolFactory, clientManager, transport);
- }
-
- public void submitTopology(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, org.apache.thrift7.async.AsyncMethodCallback<submitTopology_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- submitTopology_call method_call = new submitTopology_call(name, uploadedJarLocation, jsonConf, topology, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class submitTopology_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- private String uploadedJarLocation;
- private String jsonConf;
- private StormTopology topology;
- public submitTopology_call(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, org.apache.thrift7.async.AsyncMethodCallback<submitTopology_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- this.uploadedJarLocation = uploadedJarLocation;
- this.jsonConf = jsonConf;
- this.topology = topology;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("submitTopology", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- submitTopology_args args = new submitTopology_args();
- args.set_name(name);
- args.set_uploadedJarLocation(uploadedJarLocation);
- args.set_jsonConf(jsonConf);
- args.set_topology(topology);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_submitTopology();
- }
- }
-
- public void submitTopologyWithOpts(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options, org.apache.thrift7.async.AsyncMethodCallback<submitTopologyWithOpts_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- submitTopologyWithOpts_call method_call = new submitTopologyWithOpts_call(name, uploadedJarLocation, jsonConf, topology, options, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class submitTopologyWithOpts_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- private String uploadedJarLocation;
- private String jsonConf;
- private StormTopology topology;
- private SubmitOptions options;
- public submitTopologyWithOpts_call(String name, String uploadedJarLocation, String jsonConf, StormTopology topology, SubmitOptions options, org.apache.thrift7.async.AsyncMethodCallback<submitTopologyWithOpts_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- this.uploadedJarLocation = uploadedJarLocation;
- this.jsonConf = jsonConf;
- this.topology = topology;
- this.options = options;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("submitTopologyWithOpts", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- submitTopologyWithOpts_args args = new submitTopologyWithOpts_args();
- args.set_name(name);
- args.set_uploadedJarLocation(uploadedJarLocation);
- args.set_jsonConf(jsonConf);
- args.set_topology(topology);
- args.set_options(options);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws AlreadyAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_submitTopologyWithOpts();
- }
- }
-
- public void killTopology(String name, org.apache.thrift7.async.AsyncMethodCallback<killTopology_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- killTopology_call method_call = new killTopology_call(name, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class killTopology_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- public killTopology_call(String name, org.apache.thrift7.async.AsyncMethodCallback<killTopology_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("killTopology", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- killTopology_args args = new killTopology_args();
- args.set_name(name);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_killTopology();
- }
- }
-
- public void killTopologyWithOpts(String name, KillOptions options, org.apache.thrift7.async.AsyncMethodCallback<killTopologyWithOpts_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- killTopologyWithOpts_call method_call = new killTopologyWithOpts_call(name, options, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class killTopologyWithOpts_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- private KillOptions options;
- public killTopologyWithOpts_call(String name, KillOptions options, org.apache.thrift7.async.AsyncMethodCallback<killTopologyWithOpts_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- this.options = options;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("killTopologyWithOpts", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- killTopologyWithOpts_args args = new killTopologyWithOpts_args();
- args.set_name(name);
- args.set_options(options);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_killTopologyWithOpts();
- }
- }
-
- public void activate(String name, org.apache.thrift7.async.AsyncMethodCallback<activate_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- activate_call method_call = new activate_call(name, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class activate_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- public activate_call(String name, org.apache.thrift7.async.AsyncMethodCallback<activate_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("activate", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- activate_args args = new activate_args();
- args.set_name(name);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_activate();
- }
- }
-
- public void deactivate(String name, org.apache.thrift7.async.AsyncMethodCallback<deactivate_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- deactivate_call method_call = new deactivate_call(name, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class deactivate_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- public deactivate_call(String name, org.apache.thrift7.async.AsyncMethodCallback<deactivate_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("deactivate", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- deactivate_args args = new deactivate_args();
- args.set_name(name);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_deactivate();
- }
- }
-
- public void rebalance(String name, RebalanceOptions options, org.apache.thrift7.async.AsyncMethodCallback<rebalance_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- rebalance_call method_call = new rebalance_call(name, options, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class rebalance_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- private RebalanceOptions options;
- public rebalance_call(String name, RebalanceOptions options, org.apache.thrift7.async.AsyncMethodCallback<rebalance_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- this.options = options;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("rebalance", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- rebalance_args args = new rebalance_args();
- args.set_name(name);
- args.set_options(options);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, InvalidTopologyException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_rebalance();
- }
- }
-
- public void metricMonitor(String name, MonitorOptions options, org.apache.thrift7.async.AsyncMethodCallback<metricMonitor_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- metricMonitor_call method_call = new metricMonitor_call(name, options, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class metricMonitor_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- private MonitorOptions options;
- public metricMonitor_call(String name, MonitorOptions options, org.apache.thrift7.async.AsyncMethodCallback<metricMonitor_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- this.options = options;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("metricMonitor", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- metricMonitor_args args = new metricMonitor_args();
- args.set_name(name);
- args.set_options(options);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_metricMonitor();
- }
- }
-
- public void restart(String name, String jsonConf, org.apache.thrift7.async.AsyncMethodCallback<restart_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- restart_call method_call = new restart_call(name, jsonConf, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class restart_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String name;
- private String jsonConf;
- public restart_call(String name, String jsonConf, org.apache.thrift7.async.AsyncMethodCallback<restart_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.name = name;
- this.jsonConf = jsonConf;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("restart", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- restart_args args = new restart_args();
- args.set_name(name);
- args.set_jsonConf(jsonConf);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws NotAliveException, InvalidTopologyException, TopologyAssignException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_restart();
- }
- }
-
- public void beginLibUpload(String libName, org.apache.thrift7.async.AsyncMethodCallback<beginLibUpload_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- beginLibUpload_call method_call = new beginLibUpload_call(libName, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class beginLibUpload_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String libName;
- public beginLibUpload_call(String libName, org.apache.thrift7.async.AsyncMethodCallback<beginLibUpload_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.libName = libName;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("beginLibUpload", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- beginLibUpload_args args = new beginLibUpload_args();
- args.set_libName(libName);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_beginLibUpload();
- }
- }
-
- public void beginFileUpload(org.apache.thrift7.async.AsyncMethodCallback<beginFileUpload_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- beginFileUpload_call method_call = new beginFileUpload_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class beginFileUpload_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public beginFileUpload_call(org.apache.thrift7.async.AsyncMethodCallback<beginFileUpload_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("beginFileUpload", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- beginFileUpload_args args = new beginFileUpload_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public String getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_beginFileUpload();
- }
- }
-
- public void uploadChunk(String location, ByteBuffer chunk, org.apache.thrift7.async.AsyncMethodCallback<uploadChunk_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- uploadChunk_call method_call = new uploadChunk_call(location, chunk, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class uploadChunk_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String location;
- private ByteBuffer chunk;
- public uploadChunk_call(String location, ByteBuffer chunk, org.apache.thrift7.async.AsyncMethodCallback<uploadChunk_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.location = location;
- this.chunk = chunk;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("uploadChunk", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- uploadChunk_args args = new uploadChunk_args();
- args.set_location(location);
- args.set_chunk(chunk);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_uploadChunk();
- }
- }
-
- public void finishFileUpload(String location, org.apache.thrift7.async.AsyncMethodCallback<finishFileUpload_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- finishFileUpload_call method_call = new finishFileUpload_call(location, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class finishFileUpload_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String location;
- public finishFileUpload_call(String location, org.apache.thrift7.async.AsyncMethodCallback<finishFileUpload_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.location = location;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("finishFileUpload", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- finishFileUpload_args args = new finishFileUpload_args();
- args.set_location(location);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public void getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- (new Client(prot)).recv_finishFileUpload();
- }
- }
-
- public void beginFileDownload(String file, org.apache.thrift7.async.AsyncMethodCallback<beginFileDownload_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- beginFileDownload_call method_call = new beginFileDownload_call(file, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class beginFileDownload_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String file;
- public beginFileDownload_call(String file, org.apache.thrift7.async.AsyncMethodCallback<beginFileDownload_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.file = file;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("beginFileDownload", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- beginFileDownload_args args = new beginFileDownload_args();
- args.set_file(file);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public String getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_beginFileDownload();
- }
- }
-
- public void downloadChunk(String id, org.apache.thrift7.async.AsyncMethodCallback<downloadChunk_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- downloadChunk_call method_call = new downloadChunk_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class downloadChunk_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public downloadChunk_call(String id, org.apache.thrift7.async.AsyncMethodCallback<downloadChunk_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("downloadChunk", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- downloadChunk_args args = new downloadChunk_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public ByteBuffer getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_downloadChunk();
- }
- }
-
- public void getNimbusConf(org.apache.thrift7.async.AsyncMethodCallback<getNimbusConf_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getNimbusConf_call method_call = new getNimbusConf_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getNimbusConf_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public getNimbusConf_call(org.apache.thrift7.async.AsyncMethodCallback<getNimbusConf_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getNimbusConf", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getNimbusConf_args args = new getNimbusConf_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public String getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getNimbusConf();
- }
- }
-
- public void getClusterInfo(org.apache.thrift7.async.AsyncMethodCallback<getClusterInfo_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getClusterInfo_call method_call = new getClusterInfo_call(resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getClusterInfo_call extends org.apache.thrift7.async.TAsyncMethodCall {
- public getClusterInfo_call(org.apache.thrift7.async.AsyncMethodCallback<getClusterInfo_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getClusterInfo", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getClusterInfo_args args = new getClusterInfo_args();
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public ClusterSummary getResult() throws org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getClusterInfo();
- }
- }
-
- public void getTopologyInfo(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopologyInfo_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getTopologyInfo_call method_call = new getTopologyInfo_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getTopologyInfo_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public getTopologyInfo_call(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopologyInfo_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getTopologyInfo", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getTopologyInfo_args args = new getTopologyInfo_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public TopologyInfo getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getTopologyInfo();
- }
- }
-
- public void getTopologyInfoByName(String topologyName, org.apache.thrift7.async.AsyncMethodCallback<getTopologyInfoByName_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getTopologyInfoByName_call method_call = new getTopologyInfoByName_call(topologyName, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getTopologyInfoByName_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String topologyName;
- public getTopologyInfoByName_call(String topologyName, org.apache.thrift7.async.AsyncMethodCallback<getTopologyInfoByName_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.topologyName = topologyName;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getTopologyInfoByName", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getTopologyInfoByName_args args = new getTopologyInfoByName_args();
- args.set_topologyName(topologyName);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public TopologyInfo getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getTopologyInfoByName();
- }
- }
-
- public void getSupervisorWorkers(String host, org.apache.thrift7.async.AsyncMethodCallback<getSupervisorWorkers_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getSupervisorWorkers_call method_call = new getSupervisorWorkers_call(host, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getSupervisorWorkers_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String host;
- public getSupervisorWorkers_call(String host, org.apache.thrift7.async.AsyncMethodCallback<getSupervisorWorkers_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.host = host;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getSupervisorWorkers", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getSupervisorWorkers_args args = new getSupervisorWorkers_args();
- args.set_host(host);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public SupervisorWorkers getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getSupervisorWorkers();
- }
- }
-
- public void getTopologyConf(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopologyConf_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getTopologyConf_call method_call = new getTopologyConf_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getTopologyConf_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public getTopologyConf_call(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopologyConf_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getTopologyConf", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getTopologyConf_args args = new getTopologyConf_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public String getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getTopologyConf();
- }
- }
-
- public void getTopology(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopology_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getTopology_call method_call = new getTopology_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getTopology_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public getTopology_call(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopology_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getTopology", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getTopology_args args = new getTopology_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public StormTopology getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getTopology();
- }
- }
-
- public void getUserTopology(String id, org.apache.thrift7.async.AsyncMethodCallback<getUserTopology_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getUserTopology_call method_call = new getUserTopology_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getUserTopology_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public getUserTopology_call(String id, org.apache.thrift7.async.AsyncMethodCallback<getUserTopology_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getUserTopology", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getUserTopology_args args = new getUserTopology_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public StormTopology getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getUserTopology();
- }
- }
-
- public void getTopologyMetric(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopologyMetric_call> resultHandler) throws org.apache.thrift7.TException {
- checkReady();
- getTopologyMetric_call method_call = new getTopologyMetric_call(id, resultHandler, this, ___protocolFactory, ___transport);
- this.___currentMethod = method_call;
- ___manager.call(method_call);
- }
-
- public static class getTopologyMetric_call extends org.apache.thrift7.async.TAsyncMethodCall {
- private String id;
- public getTopologyMetric_call(String id, org.apache.thrift7.async.AsyncMethodCallback<getTopologyMetric_call> resultHandler, org.apache.thrift7.async.TAsyncClient client, org.apache.thrift7.protocol.TProtocolFactory protocolFactory, org.apache.thrift7.transport.TNonblockingTransport transport) throws org.apache.thrift7.TException {
- super(client, protocolFactory, transport, resultHandler, false);
- this.id = id;
- }
-
- public void write_args(org.apache.thrift7.protocol.TProtocol prot) throws org.apache.thrift7.TException {
- prot.writeMessageBegin(new org.apache.thrift7.protocol.TMessage("getTopologyMetric", org.apache.thrift7.protocol.TMessageType.CALL, 0));
- getTopologyMetric_args args = new getTopologyMetric_args();
- args.set_id(id);
- args.write(prot);
- prot.writeMessageEnd();
- }
-
- public TopologyMetricInfo getResult() throws NotAliveException, org.apache.thrift7.TException {
- if (getState() != org.apache.thrift7.async.TAsyncMethodCall.State.RESPONSE_READ) {
- throw new IllegalStateException("Method call not finished!");
- }
- org.apache.thrift7.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift7.transport.TMemoryInputTransport(getFrameBuffer().array());
- org.apache.thrift7.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
- return (new Client(prot)).recv_getTopologyMetric();
- }
- }
-
- }
-
- public static class Processor<I extends Iface> extends org.apache.thrift7.TBaseProcessor implements org.apache.thrift7.TProcessor {
- private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
- public Processor(I iface) {
- super(iface, getProcessMap(new HashMap<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>>()));
- }
-
- protected Processor(I iface, Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- super(iface, getProcessMap(processMap));
- }
-
- private static <I extends Iface> Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> getProcessMap(Map<String, org.apache.thrift7.ProcessFunction<I, ? extends org.apache.thrift7.TBase>> processMap) {
- processMap.put("submitTopology", new submitTopology());
- processMap.put("submitTopologyWithOpts", new submitTopologyWithOpts());
- processMap.put("killTopology", new killTopology());
- processMap.put("killTopologyWithOpts", new killTopologyWithOpts());
- processMap.put("activate", new activate());
- processMap.put("deactivate", new deactivate());
- processMap.put("rebalance", new rebalance());
- processMap.put("metricMonitor", new metricMonitor());
- processMap.put("restart", new restart());
- processMap.put("beginLibUpload", new beginLibUpload());
- processMap.put("beginFileUpload", new beginFileUpload());
- processMap.put("uploadChunk", new uploadChunk());
- processMap.put("finishFileUpload", new finishFileUpload());
- processMap.put("beginFileDownload", new beginFileDownload());
- processMap.put("downloadChunk", new downloadChunk());
- processMap.put("getNimbusConf", new getNimbusConf());
- processMap.put("getClusterInfo", new getClusterInfo());
- processMap.put("getTopologyInfo", new getTopologyInfo());
- processMap.put("getTopologyInfoByName", new getTopologyInfoByName());
- processMap.put("getSupervisorWorkers", new getSupervisorWorkers());
- processMap.put("getTopologyConf", new getTopologyConf());
- processMap.put("getTopology", new getTopology());
- processMap.put("getUserTopology", new getUserTopology());
- processMap.put("getTopologyMetric", new getTopologyMetric());
- return processMap;
- }
-
- private static class submitTopology<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, submitTopology_args> {
- public submitTopology() {
- super("submitTopology");
- }
-
- protected submitTopology_args getEmptyArgsInstance() {
- return new submitTopology_args();
- }
-
- protected submitTopology_result getResult(I iface, submitTopology_args args) throws org.apache.thrift7.TException {
- submitTopology_result result = new submitTopology_result();
- try {
- iface.submitTopology(args.name, args.uploadedJarLocation, args.jsonConf, args.topology);
- } catch (AlreadyAliveException e) {
- result.e = e;
- } catch (InvalidTopologyException ite) {
- result.ite = ite;
- } catch (TopologyAssignException tae) {
- result.tae = tae;
- }
- return result;
- }
- }
-
- private static class submitTopologyWithOpts<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, submitTopologyWithOpts_args> {
- public submitTopologyWithOpts() {
- super("submitTopologyWithOpts");
- }
-
- protected submitTopologyWithOpts_args getEmptyArgsInstance() {
- return new submitTopologyWithOpts_args();
- }
-
- protected submitTopologyWithOpts_result getResult(I iface, submitTopologyWithOpts_args args) throws org.apache.thrift7.TException {
- submitTopologyWithOpts_result result = new submitTopologyWithOpts_result();
- try {
- iface.submitTopologyWithOpts(args.name, args.uploadedJarLocation, args.jsonConf, args.topology, args.options);
- } catch (AlreadyAliveException e) {
- result.e = e;
- } catch (InvalidTopologyException ite) {
- result.ite = ite;
- } catch (TopologyAssignException tae) {
- result.tae = tae;
- }
- return result;
- }
- }
-
- private static class killTopology<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, killTopology_args> {
- public killTopology() {
- super("killTopology");
- }
-
- protected killTopology_args getEmptyArgsInstance() {
- return new killTopology_args();
- }
-
- protected killTopology_result getResult(I iface, killTopology_args args) throws org.apache.thrift7.TException {
- killTopology_result result = new killTopology_result();
- try {
- iface.killTopology(args.name);
- } catch (NotAliveException e) {
- result.e = e;
- }
- return result;
- }
- }
-
- private static class killTopologyWithOpts<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, killTopologyWithOpts_args> {
- public killTopologyWithOpts() {
- super("killTopologyWithOpts");
- }
-
- protected killTopologyWithOpts_args getEmptyArgsInstance() {
- return new killTopologyWithOpts_args();
- }
-
- protected killTopologyWithOpts_result getResult(I iface, killTopologyWithOpts_args args) throws org.apache.thrift7.TException {
- killTopologyWithOpts_result result = new killTopologyWithOpts_result();
- try {
- iface.killTopologyWithOpts(args.name, args.options);
- } catch (NotAliveException e) {
- result.e = e;
- }
- return result;
- }
- }
-
- private static class activate<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, activate_args> {
- public activate() {
- super("activate");
- }
-
- protected activate_args getEmptyArgsInstance() {
- return new activate_args();
- }
-
- protected activate_result getResult(I iface, activate_args args) throws org.apache.thrift7.TException {
- activate_result result = new activate_result();
- try {
- iface.activate(args.name);
- } catch (NotAliveException e) {
- result.e = e;
- }
- return result;
- }
- }
-
- private static class deactivate<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, deactivate_args> {
- public deactivate() {
- super("deactivate");
- }
-
- protected deactivate_args getEmptyArgsInstance() {
- return new deactivate_args();
- }
-
- protected deactivate_result getResult(I iface, deactivate_args args) throws org.apache.thrift7.TException {
- deactivate_result result = new deactivate_result();
- try {
- iface.deactivate(args.name);
- } catch (NotAliveException e) {
- result.e = e;
- }
- return result;
- }
- }
-
- private static class rebalance<I extends Iface> extends org.apache.thrift7.ProcessFunction<I, rebalance_args> {
- public rebalance() {
- super("rebalance");
- }
-
- protected rebalance_args getEmptyArgsInstance() {
- return new rebalance_args();
- }
-
- protected rebalance_result getResult(I iface, rebalance_args args) throws org.apache.thrift7.TException {
- rebalance_result result = new rebalance_result();
- try {
- iface.rebalance(args.name, args.options);
- } catch (NotAliveException e) {
- result.e = e;
- } catch (InvalidTopologyException ite) {
-
<TRUNCATED>
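[Editor's note] The code removed above is the Thrift-generated synchronous Client, AsyncClient, and Processor for the Nimbus service. For orientation, here is a minimal, illustrative sketch of how such a generated synchronous client is typically driven once the code is regenerated against org.apache.thrift (0.9.2), as this commit does. The host name, port number, and the printed output are assumptions for illustration only and are not taken from this commit.

import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import backtype.storm.generated.ClusterSummary;
import backtype.storm.generated.Nimbus;

public class NimbusClientSketch {
    public static void main(String[] args) throws Exception {
        // Framed transport + binary protocol is the usual pairing for the
        // Nimbus Thrift service; the host and port here are assumptions.
        TSocket socket = new TSocket("nimbus.example.com", 6627);
        TFramedTransport transport = new TFramedTransport(socket);
        transport.open();
        try {
            // Each RPC on the generated client wraps a send_X/recv_X pair
            // like the ones shown in the removed code above.
            Nimbus.Client client = new Nimbus.Client(new TBinaryProtocol(transport));
            ClusterSummary summary = client.getClusterInfo();
            System.out.println(summary);
        } finally {
            transport.close();
        }
    }
}

The asynchronous variant follows the same shape: AsyncClient.getClusterInfo(...) takes an AsyncMethodCallback and the result is read back by calling getResult() on the completed *_call object, as the removed AsyncClient code above illustrates.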
[02/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/JavaObject.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/JavaObject.java b/jstorm-core/src/main/java/backtype/storm/generated/JavaObject.java
new file mode 100644
index 0000000..5998993
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/JavaObject.java
@@ -0,0 +1,544 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class JavaObject implements org.apache.thrift.TBase<JavaObject, JavaObject._Fields>, java.io.Serializable, Cloneable, Comparable<JavaObject> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("JavaObject");
+
+ private static final org.apache.thrift.protocol.TField FULL_CLASS_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("full_class_name", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField ARGS_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("args_list", org.apache.thrift.protocol.TType.LIST, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new JavaObjectStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new JavaObjectTupleSchemeFactory());
+ }
+
+ private String full_class_name; // required
+ private List<JavaObjectArg> args_list; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ FULL_CLASS_NAME((short)1, "full_class_name"),
+ ARGS_LIST((short)2, "args_list");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // FULL_CLASS_NAME
+ return FULL_CLASS_NAME;
+ case 2: // ARGS_LIST
+ return ARGS_LIST;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.FULL_CLASS_NAME, new org.apache.thrift.meta_data.FieldMetaData("full_class_name", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.ARGS_LIST, new org.apache.thrift.meta_data.FieldMetaData("args_list", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, JavaObjectArg.class))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(JavaObject.class, metaDataMap);
+ }
+
+ public JavaObject() {
+ }
+
+ public JavaObject(
+ String full_class_name,
+ List<JavaObjectArg> args_list)
+ {
+ this();
+ this.full_class_name = full_class_name;
+ this.args_list = args_list;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public JavaObject(JavaObject other) {
+ if (other.is_set_full_class_name()) {
+ this.full_class_name = other.full_class_name;
+ }
+ if (other.is_set_args_list()) {
+ List<JavaObjectArg> __this__args_list = new ArrayList<JavaObjectArg>(other.args_list.size());
+ for (JavaObjectArg other_element : other.args_list) {
+ __this__args_list.add(new JavaObjectArg(other_element));
+ }
+ this.args_list = __this__args_list;
+ }
+ }
+
+ public JavaObject deepCopy() {
+ return new JavaObject(this);
+ }
+
+ @Override
+ public void clear() {
+ this.full_class_name = null;
+ this.args_list = null;
+ }
+
+ public String get_full_class_name() {
+ return this.full_class_name;
+ }
+
+ public void set_full_class_name(String full_class_name) {
+ this.full_class_name = full_class_name;
+ }
+
+ public void unset_full_class_name() {
+ this.full_class_name = null;
+ }
+
+ /** Returns true if field full_class_name is set (has been assigned a value) and false otherwise */
+ public boolean is_set_full_class_name() {
+ return this.full_class_name != null;
+ }
+
+ public void set_full_class_name_isSet(boolean value) {
+ if (!value) {
+ this.full_class_name = null;
+ }
+ }
+
+ public int get_args_list_size() {
+ return (this.args_list == null) ? 0 : this.args_list.size();
+ }
+
+ public java.util.Iterator<JavaObjectArg> get_args_list_iterator() {
+ return (this.args_list == null) ? null : this.args_list.iterator();
+ }
+
+ public void add_to_args_list(JavaObjectArg elem) {
+ if (this.args_list == null) {
+ this.args_list = new ArrayList<JavaObjectArg>();
+ }
+ this.args_list.add(elem);
+ }
+
+ public List<JavaObjectArg> get_args_list() {
+ return this.args_list;
+ }
+
+ public void set_args_list(List<JavaObjectArg> args_list) {
+ this.args_list = args_list;
+ }
+
+ public void unset_args_list() {
+ this.args_list = null;
+ }
+
+ /** Returns true if field args_list is set (has been assigned a value) and false otherwise */
+ public boolean is_set_args_list() {
+ return this.args_list != null;
+ }
+
+ public void set_args_list_isSet(boolean value) {
+ if (!value) {
+ this.args_list = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case FULL_CLASS_NAME:
+ if (value == null) {
+ unset_full_class_name();
+ } else {
+ set_full_class_name((String)value);
+ }
+ break;
+
+ case ARGS_LIST:
+ if (value == null) {
+ unset_args_list();
+ } else {
+ set_args_list((List<JavaObjectArg>)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case FULL_CLASS_NAME:
+ return get_full_class_name();
+
+ case ARGS_LIST:
+ return get_args_list();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case FULL_CLASS_NAME:
+ return is_set_full_class_name();
+ case ARGS_LIST:
+ return is_set_args_list();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof JavaObject)
+ return this.equals((JavaObject)that);
+ return false;
+ }
+
+ public boolean equals(JavaObject that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_full_class_name = true && this.is_set_full_class_name();
+ boolean that_present_full_class_name = true && that.is_set_full_class_name();
+ if (this_present_full_class_name || that_present_full_class_name) {
+ if (!(this_present_full_class_name && that_present_full_class_name))
+ return false;
+ if (!this.full_class_name.equals(that.full_class_name))
+ return false;
+ }
+
+ boolean this_present_args_list = true && this.is_set_args_list();
+ boolean that_present_args_list = true && that.is_set_args_list();
+ if (this_present_args_list || that_present_args_list) {
+ if (!(this_present_args_list && that_present_args_list))
+ return false;
+ if (!this.args_list.equals(that.args_list))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_full_class_name = true && (is_set_full_class_name());
+ list.add(present_full_class_name);
+ if (present_full_class_name)
+ list.add(full_class_name);
+
+ boolean present_args_list = true && (is_set_args_list());
+ list.add(present_args_list);
+ if (present_args_list)
+ list.add(args_list);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(JavaObject other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_full_class_name()).compareTo(other.is_set_full_class_name());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_full_class_name()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.full_class_name, other.full_class_name);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_args_list()).compareTo(other.is_set_args_list());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_args_list()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.args_list, other.args_list);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("JavaObject(");
+ boolean first = true;
+
+ sb.append("full_class_name:");
+ if (this.full_class_name == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.full_class_name);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("args_list:");
+ if (this.args_list == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.args_list);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_full_class_name()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'full_class_name' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_args_list()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'args_list' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class JavaObjectStandardSchemeFactory implements SchemeFactory {
+ public JavaObjectStandardScheme getScheme() {
+ return new JavaObjectStandardScheme();
+ }
+ }
+
+ private static class JavaObjectStandardScheme extends StandardScheme<JavaObject> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, JavaObject struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // FULL_CLASS_NAME
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.full_class_name = iprot.readString();
+ struct.set_full_class_name_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // ARGS_LIST
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list0 = iprot.readListBegin();
+ struct.args_list = new ArrayList<JavaObjectArg>(_list0.size);
+ JavaObjectArg _elem1;
+ for (int _i2 = 0; _i2 < _list0.size; ++_i2)
+ {
+ _elem1 = new JavaObjectArg();
+ _elem1.read(iprot);
+ struct.args_list.add(_elem1);
+ }
+ iprot.readListEnd();
+ }
+ struct.set_args_list_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, JavaObject struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.full_class_name != null) {
+ oprot.writeFieldBegin(FULL_CLASS_NAME_FIELD_DESC);
+ oprot.writeString(struct.full_class_name);
+ oprot.writeFieldEnd();
+ }
+ if (struct.args_list != null) {
+ oprot.writeFieldBegin(ARGS_LIST_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.args_list.size()));
+ for (JavaObjectArg _iter3 : struct.args_list)
+ {
+ _iter3.write(oprot);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class JavaObjectTupleSchemeFactory implements SchemeFactory {
+ public JavaObjectTupleScheme getScheme() {
+ return new JavaObjectTupleScheme();
+ }
+ }
+
+ private static class JavaObjectTupleScheme extends TupleScheme<JavaObject> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, JavaObject struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.full_class_name);
+ {
+ oprot.writeI32(struct.args_list.size());
+ for (JavaObjectArg _iter4 : struct.args_list)
+ {
+ _iter4.write(oprot);
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, JavaObject struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.full_class_name = iprot.readString();
+ struct.set_full_class_name_isSet(true);
+ {
+ org.apache.thrift.protocol.TList _list5 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.args_list = new ArrayList<JavaObjectArg>(_list5.size);
+ JavaObjectArg _elem6;
+ for (int _i7 = 0; _i7 < _list5.size; ++_i7)
+ {
+ _elem6 = new JavaObjectArg();
+ _elem6.read(iprot);
+ struct.args_list.add(_elem6);
+ }
+ }
+ struct.set_args_list_isSet(true);
+ }
+ }
+
+}
+
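
JavaObject pairs a fully qualified class name with a list of JavaObjectArg constructor arguments, so a Java instance can be described in a language-neutral Thrift struct. A minimal sketch of building one follows; the class name com.example.MyFilter and its arguments are made-up placeholders, not anything defined in this commit.

    import backtype.storm.generated.JavaObject;
    import backtype.storm.generated.JavaObjectArg;
    import java.util.Arrays;

    public class JavaObjectDemo {
        public static void main(String[] args) throws Exception {
            // Describe a hypothetical "new com.example.MyFilter(5, "prefix")".
            JavaObject obj = new JavaObject(
                "com.example.MyFilter",
                Arrays.asList(
                    JavaObjectArg.int_arg(5),
                    JavaObjectArg.string_arg("prefix")));

            // Both fields are required; validate() passes here and would throw
            // a TProtocolException if either field were left unset.
            obj.validate();
            System.out.println(obj);
        }
    }
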
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/JavaObjectArg.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/JavaObjectArg.java b/jstorm-core/src/main/java/backtype/storm/generated/JavaObjectArg.java
new file mode 100755
index 0000000..4469306
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/JavaObjectArg.java
@@ -0,0 +1,614 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+public class JavaObjectArg extends org.apache.thrift.TUnion<JavaObjectArg, JavaObjectArg._Fields> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("JavaObjectArg");
+ private static final org.apache.thrift.protocol.TField INT_ARG_FIELD_DESC = new org.apache.thrift.protocol.TField("int_arg", org.apache.thrift.protocol.TType.I32, (short)1);
+ private static final org.apache.thrift.protocol.TField LONG_ARG_FIELD_DESC = new org.apache.thrift.protocol.TField("long_arg", org.apache.thrift.protocol.TType.I64, (short)2);
+ private static final org.apache.thrift.protocol.TField STRING_ARG_FIELD_DESC = new org.apache.thrift.protocol.TField("string_arg", org.apache.thrift.protocol.TType.STRING, (short)3);
+ private static final org.apache.thrift.protocol.TField BOOL_ARG_FIELD_DESC = new org.apache.thrift.protocol.TField("bool_arg", org.apache.thrift.protocol.TType.BOOL, (short)4);
+ private static final org.apache.thrift.protocol.TField BINARY_ARG_FIELD_DESC = new org.apache.thrift.protocol.TField("binary_arg", org.apache.thrift.protocol.TType.STRING, (short)5);
+ private static final org.apache.thrift.protocol.TField DOUBLE_ARG_FIELD_DESC = new org.apache.thrift.protocol.TField("double_arg", org.apache.thrift.protocol.TType.DOUBLE, (short)6);
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ INT_ARG((short)1, "int_arg"),
+ LONG_ARG((short)2, "long_arg"),
+ STRING_ARG((short)3, "string_arg"),
+ BOOL_ARG((short)4, "bool_arg"),
+ BINARY_ARG((short)5, "binary_arg"),
+ DOUBLE_ARG((short)6, "double_arg");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // INT_ARG
+ return INT_ARG;
+ case 2: // LONG_ARG
+ return LONG_ARG;
+ case 3: // STRING_ARG
+ return STRING_ARG;
+ case 4: // BOOL_ARG
+ return BOOL_ARG;
+ case 5: // BINARY_ARG
+ return BINARY_ARG;
+ case 6: // DOUBLE_ARG
+ return DOUBLE_ARG;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.INT_ARG, new org.apache.thrift.meta_data.FieldMetaData("int_arg", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ tmpMap.put(_Fields.LONG_ARG, new org.apache.thrift.meta_data.FieldMetaData("long_arg", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+ tmpMap.put(_Fields.STRING_ARG, new org.apache.thrift.meta_data.FieldMetaData("string_arg", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.BOOL_ARG, new org.apache.thrift.meta_data.FieldMetaData("bool_arg", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL)));
+ tmpMap.put(_Fields.BINARY_ARG, new org.apache.thrift.meta_data.FieldMetaData("binary_arg", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
+ tmpMap.put(_Fields.DOUBLE_ARG, new org.apache.thrift.meta_data.FieldMetaData("double_arg", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.DOUBLE)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(JavaObjectArg.class, metaDataMap);
+ }
+
+ public JavaObjectArg() {
+ super();
+ }
+
+ public JavaObjectArg(_Fields setField, Object value) {
+ super(setField, value);
+ }
+
+ public JavaObjectArg(JavaObjectArg other) {
+ super(other);
+ }
+ public JavaObjectArg deepCopy() {
+ return new JavaObjectArg(this);
+ }
+
+ public static JavaObjectArg int_arg(int value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_int_arg(value);
+ return x;
+ }
+
+ public static JavaObjectArg long_arg(long value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_long_arg(value);
+ return x;
+ }
+
+ public static JavaObjectArg string_arg(String value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_string_arg(value);
+ return x;
+ }
+
+ public static JavaObjectArg bool_arg(boolean value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_bool_arg(value);
+ return x;
+ }
+
+ public static JavaObjectArg binary_arg(ByteBuffer value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_binary_arg(value);
+ return x;
+ }
+
+ public static JavaObjectArg binary_arg(byte[] value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_binary_arg(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+ return x;
+ }
+
+ public static JavaObjectArg double_arg(double value) {
+ JavaObjectArg x = new JavaObjectArg();
+ x.set_double_arg(value);
+ return x;
+ }
+
+
+ @Override
+ protected void checkType(_Fields setField, Object value) throws ClassCastException {
+ switch (setField) {
+ case INT_ARG:
+ if (value instanceof Integer) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type Integer for field 'int_arg', but got " + value.getClass().getSimpleName());
+ case LONG_ARG:
+ if (value instanceof Long) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type Long for field 'long_arg', but got " + value.getClass().getSimpleName());
+ case STRING_ARG:
+ if (value instanceof String) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type String for field 'string_arg', but got " + value.getClass().getSimpleName());
+ case BOOL_ARG:
+ if (value instanceof Boolean) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type Boolean for field 'bool_arg', but got " + value.getClass().getSimpleName());
+ case BINARY_ARG:
+ if (value instanceof ByteBuffer) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type ByteBuffer for field 'binary_arg', but got " + value.getClass().getSimpleName());
+ case DOUBLE_ARG:
+ if (value instanceof Double) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type Double for field 'double_arg', but got " + value.getClass().getSimpleName());
+ default:
+ throw new IllegalArgumentException("Unknown field id " + setField);
+ }
+ }
+
+ @Override
+ protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
+ _Fields setField = _Fields.findByThriftId(field.id);
+ if (setField != null) {
+ switch (setField) {
+ case INT_ARG:
+ if (field.type == INT_ARG_FIELD_DESC.type) {
+ Integer int_arg;
+ int_arg = iprot.readI32();
+ return int_arg;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case LONG_ARG:
+ if (field.type == LONG_ARG_FIELD_DESC.type) {
+ Long long_arg;
+ long_arg = iprot.readI64();
+ return long_arg;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case STRING_ARG:
+ if (field.type == STRING_ARG_FIELD_DESC.type) {
+ String string_arg;
+ string_arg = iprot.readString();
+ return string_arg;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case BOOL_ARG:
+ if (field.type == BOOL_ARG_FIELD_DESC.type) {
+ Boolean bool_arg;
+ bool_arg = iprot.readBool();
+ return bool_arg;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case BINARY_ARG:
+ if (field.type == BINARY_ARG_FIELD_DESC.type) {
+ ByteBuffer binary_arg;
+ binary_arg = iprot.readBinary();
+ return binary_arg;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case DOUBLE_ARG:
+ if (field.type == DOUBLE_ARG_FIELD_DESC.type) {
+ Double double_arg;
+ double_arg = iprot.readDouble();
+ return double_arg;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ default:
+ throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+ }
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ }
+
+ @Override
+ protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ switch (setField_) {
+ case INT_ARG:
+ Integer int_arg = (Integer)value_;
+ oprot.writeI32(int_arg);
+ return;
+ case LONG_ARG:
+ Long long_arg = (Long)value_;
+ oprot.writeI64(long_arg);
+ return;
+ case STRING_ARG:
+ String string_arg = (String)value_;
+ oprot.writeString(string_arg);
+ return;
+ case BOOL_ARG:
+ Boolean bool_arg = (Boolean)value_;
+ oprot.writeBool(bool_arg);
+ return;
+ case BINARY_ARG:
+ ByteBuffer binary_arg = (ByteBuffer)value_;
+ oprot.writeBinary(binary_arg);
+ return;
+ case DOUBLE_ARG:
+ Double double_arg = (Double)value_;
+ oprot.writeDouble(double_arg);
+ return;
+ default:
+ throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+ }
+ }
+
+ @Override
+ protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID) throws org.apache.thrift.TException {
+ _Fields setField = _Fields.findByThriftId(fieldID);
+ if (setField != null) {
+ switch (setField) {
+ case INT_ARG:
+ Integer int_arg;
+ int_arg = iprot.readI32();
+ return int_arg;
+ case LONG_ARG:
+ Long long_arg;
+ long_arg = iprot.readI64();
+ return long_arg;
+ case STRING_ARG:
+ String string_arg;
+ string_arg = iprot.readString();
+ return string_arg;
+ case BOOL_ARG:
+ Boolean bool_arg;
+ bool_arg = iprot.readBool();
+ return bool_arg;
+ case BINARY_ARG:
+ ByteBuffer binary_arg;
+ binary_arg = iprot.readBinary();
+ return binary_arg;
+ case DOUBLE_ARG:
+ Double double_arg;
+ double_arg = iprot.readDouble();
+ return double_arg;
+ default:
+ throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+ }
+ } else {
+ throw new TProtocolException("Couldn't find a field with field id " + fieldID);
+ }
+ }
+
+ @Override
+ protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ switch (setField_) {
+ case INT_ARG:
+ Integer int_arg = (Integer)value_;
+ oprot.writeI32(int_arg);
+ return;
+ case LONG_ARG:
+ Long long_arg = (Long)value_;
+ oprot.writeI64(long_arg);
+ return;
+ case STRING_ARG:
+ String string_arg = (String)value_;
+ oprot.writeString(string_arg);
+ return;
+ case BOOL_ARG:
+ Boolean bool_arg = (Boolean)value_;
+ oprot.writeBool(bool_arg);
+ return;
+ case BINARY_ARG:
+ ByteBuffer binary_arg = (ByteBuffer)value_;
+ oprot.writeBinary(binary_arg);
+ return;
+ case DOUBLE_ARG:
+ Double double_arg = (Double)value_;
+ oprot.writeDouble(double_arg);
+ return;
+ default:
+ throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+ }
+ }
+
+ @Override
+ protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
+ switch (setField) {
+ case INT_ARG:
+ return INT_ARG_FIELD_DESC;
+ case LONG_ARG:
+ return LONG_ARG_FIELD_DESC;
+ case STRING_ARG:
+ return STRING_ARG_FIELD_DESC;
+ case BOOL_ARG:
+ return BOOL_ARG_FIELD_DESC;
+ case BINARY_ARG:
+ return BINARY_ARG_FIELD_DESC;
+ case DOUBLE_ARG:
+ return DOUBLE_ARG_FIELD_DESC;
+ default:
+ throw new IllegalArgumentException("Unknown field id " + setField);
+ }
+ }
+
+ @Override
+ protected org.apache.thrift.protocol.TStruct getStructDesc() {
+ return STRUCT_DESC;
+ }
+
+ @Override
+ protected _Fields enumForId(short id) {
+ return _Fields.findByThriftIdOrThrow(id);
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+
+ public int get_int_arg() {
+ if (getSetField() == _Fields.INT_ARG) {
+ return (Integer)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'int_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_int_arg(int value) {
+ setField_ = _Fields.INT_ARG;
+ value_ = value;
+ }
+
+ public long get_long_arg() {
+ if (getSetField() == _Fields.LONG_ARG) {
+ return (Long)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'long_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_long_arg(long value) {
+ setField_ = _Fields.LONG_ARG;
+ value_ = value;
+ }
+
+ public String get_string_arg() {
+ if (getSetField() == _Fields.STRING_ARG) {
+ return (String)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'string_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_string_arg(String value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.STRING_ARG;
+ value_ = value;
+ }
+
+ public boolean get_bool_arg() {
+ if (getSetField() == _Fields.BOOL_ARG) {
+ return (Boolean)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'bool_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_bool_arg(boolean value) {
+ setField_ = _Fields.BOOL_ARG;
+ value_ = value;
+ }
+
+ public byte[] get_binary_arg() {
+ set_binary_arg(org.apache.thrift.TBaseHelper.rightSize(buffer_for_binary_arg()));
+ ByteBuffer b = buffer_for_binary_arg();
+ return b == null ? null : b.array();
+ }
+
+ public ByteBuffer buffer_for_binary_arg() {
+ if (getSetField() == _Fields.BINARY_ARG) {
+ return org.apache.thrift.TBaseHelper.copyBinary((ByteBuffer)getFieldValue());
+ } else {
+ throw new RuntimeException("Cannot get field 'binary_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_binary_arg(byte[] value) {
+ set_binary_arg(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+ }
+
+ public void set_binary_arg(ByteBuffer value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.BINARY_ARG;
+ value_ = value;
+ }
+
+ public double get_double_arg() {
+ if (getSetField() == _Fields.DOUBLE_ARG) {
+ return (Double)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'double_arg' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_double_arg(double value) {
+ setField_ = _Fields.DOUBLE_ARG;
+ value_ = value;
+ }
+
+ public boolean is_set_int_arg() {
+ return setField_ == _Fields.INT_ARG;
+ }
+
+
+ public boolean is_set_long_arg() {
+ return setField_ == _Fields.LONG_ARG;
+ }
+
+
+ public boolean is_set_string_arg() {
+ return setField_ == _Fields.STRING_ARG;
+ }
+
+
+ public boolean is_set_bool_arg() {
+ return setField_ == _Fields.BOOL_ARG;
+ }
+
+
+ public boolean is_set_binary_arg() {
+ return setField_ == _Fields.BINARY_ARG;
+ }
+
+
+ public boolean is_set_double_arg() {
+ return setField_ == _Fields.DOUBLE_ARG;
+ }
+
+
+ public boolean equals(Object other) {
+ if (other instanceof JavaObjectArg) {
+ return equals((JavaObjectArg)other);
+ } else {
+ return false;
+ }
+ }
+
+ public boolean equals(JavaObjectArg other) {
+ return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
+ }
+
+ @Override
+ public int compareTo(JavaObjectArg other) {
+ int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
+ if (lastComparison == 0) {
+ return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
+ }
+ return lastComparison;
+ }
+
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+ list.add(this.getClass().getName());
+ org.apache.thrift.TFieldIdEnum setField = getSetField();
+ if (setField != null) {
+ list.add(setField.getThriftFieldId());
+ Object value = getFieldValue();
+ if (value instanceof org.apache.thrift.TEnum) {
+ list.add(((org.apache.thrift.TEnum)getFieldValue()).getValue());
+ } else {
+ list.add(value);
+ }
+ }
+ return list.hashCode();
+ }
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+
+}
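
JavaObjectArg is generated as a Thrift union (it extends TUnion), so exactly one of its six fields is set at a time and each getter throws a RuntimeException when the union currently holds a different field. A small, purely illustrative sketch of the check-then-read pattern:

    import backtype.storm.generated.JavaObjectArg;

    public class JavaObjectArgDemo {
        public static void main(String[] args) {
            JavaObjectArg arg = JavaObjectArg.double_arg(0.75);

            // Calling get_int_arg() here would throw, because the union is
            // currently set to DOUBLE_ARG; test the set field before reading.
            if (arg.is_set_double_arg()) {
                System.out.println("double value: " + arg.get_double_arg());
            } else if (arg.is_set_int_arg()) {
                System.out.println("int value: " + arg.get_int_arg());
            }
        }
    }
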
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/KillOptions.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/KillOptions.java b/jstorm-core/src/main/java/backtype/storm/generated/KillOptions.java
new file mode 100644
index 0000000..7abb762
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/KillOptions.java
@@ -0,0 +1,390 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class KillOptions implements org.apache.thrift.TBase<KillOptions, KillOptions._Fields>, java.io.Serializable, Cloneable, Comparable<KillOptions> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("KillOptions");
+
+ private static final org.apache.thrift.protocol.TField WAIT_SECS_FIELD_DESC = new org.apache.thrift.protocol.TField("wait_secs", org.apache.thrift.protocol.TType.I32, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new KillOptionsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new KillOptionsTupleSchemeFactory());
+ }
+
+ private int wait_secs; // optional
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ WAIT_SECS((short)1, "wait_secs");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // WAIT_SECS
+ return WAIT_SECS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __WAIT_SECS_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ private static final _Fields optionals[] = {_Fields.WAIT_SECS};
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.WAIT_SECS, new org.apache.thrift.meta_data.FieldMetaData("wait_secs", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(KillOptions.class, metaDataMap);
+ }
+
+ public KillOptions() {
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public KillOptions(KillOptions other) {
+ __isset_bitfield = other.__isset_bitfield;
+ this.wait_secs = other.wait_secs;
+ }
+
+ public KillOptions deepCopy() {
+ return new KillOptions(this);
+ }
+
+ @Override
+ public void clear() {
+ set_wait_secs_isSet(false);
+ this.wait_secs = 0;
+ }
+
+ public int get_wait_secs() {
+ return this.wait_secs;
+ }
+
+ public void set_wait_secs(int wait_secs) {
+ this.wait_secs = wait_secs;
+ set_wait_secs_isSet(true);
+ }
+
+ public void unset_wait_secs() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __WAIT_SECS_ISSET_ID);
+ }
+
+ /** Returns true if field wait_secs is set (has been assigned a value) and false otherwise */
+ public boolean is_set_wait_secs() {
+ return EncodingUtils.testBit(__isset_bitfield, __WAIT_SECS_ISSET_ID);
+ }
+
+ public void set_wait_secs_isSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __WAIT_SECS_ISSET_ID, value);
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case WAIT_SECS:
+ if (value == null) {
+ unset_wait_secs();
+ } else {
+ set_wait_secs((Integer)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case WAIT_SECS:
+ return Integer.valueOf(get_wait_secs());
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case WAIT_SECS:
+ return is_set_wait_secs();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof KillOptions)
+ return this.equals((KillOptions)that);
+ return false;
+ }
+
+ public boolean equals(KillOptions that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_wait_secs = true && this.is_set_wait_secs();
+ boolean that_present_wait_secs = true && that.is_set_wait_secs();
+ if (this_present_wait_secs || that_present_wait_secs) {
+ if (!(this_present_wait_secs && that_present_wait_secs))
+ return false;
+ if (this.wait_secs != that.wait_secs)
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_wait_secs = true && (is_set_wait_secs());
+ list.add(present_wait_secs);
+ if (present_wait_secs)
+ list.add(wait_secs);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(KillOptions other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_wait_secs()).compareTo(other.is_set_wait_secs());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_wait_secs()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.wait_secs, other.wait_secs);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("KillOptions(");
+ boolean first = true;
+
+ if (is_set_wait_secs()) {
+ sb.append("wait_secs:");
+ sb.append(this.wait_secs);
+ first = false;
+ }
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class KillOptionsStandardSchemeFactory implements SchemeFactory {
+ public KillOptionsStandardScheme getScheme() {
+ return new KillOptionsStandardScheme();
+ }
+ }
+
+ private static class KillOptionsStandardScheme extends StandardScheme<KillOptions> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, KillOptions struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // WAIT_SECS
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.wait_secs = iprot.readI32();
+ struct.set_wait_secs_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, KillOptions struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.is_set_wait_secs()) {
+ oprot.writeFieldBegin(WAIT_SECS_FIELD_DESC);
+ oprot.writeI32(struct.wait_secs);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class KillOptionsTupleSchemeFactory implements SchemeFactory {
+ public KillOptionsTupleScheme getScheme() {
+ return new KillOptionsTupleScheme();
+ }
+ }
+
+ private static class KillOptionsTupleScheme extends TupleScheme<KillOptions> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, KillOptions struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_wait_secs()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.is_set_wait_secs()) {
+ oprot.writeI32(struct.wait_secs);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, KillOptions struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.wait_secs = iprot.readI32();
+ struct.set_wait_secs_isSet(true);
+ }
+ }
+ }
+
+}
+
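
KillOptions carries a single optional wait_secs value; when it is left unset the standard scheme writes no fields at all, so the receiving side can apply its own default wait time. A short sketch of populating it is below; the commented killTopologyWithOpts call is only the assumed typical use against a Nimbus client and is not part of this file.

    import backtype.storm.generated.KillOptions;

    public class KillOptionsDemo {
        public static void main(String[] args) {
            KillOptions opts = new KillOptions();
            opts.set_wait_secs(10);   // also flips the isset bit for wait_secs

            System.out.println(opts.is_set_wait_secs());  // true
            System.out.println(opts);                     // KillOptions(wait_secs:10)

            // Assumed usage against a Nimbus Thrift client (not shown here):
            // client.killTopologyWithOpts("my-topology", opts);
        }
    }
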
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/LocalStateData.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/LocalStateData.java b/jstorm-core/src/main/java/backtype/storm/generated/LocalStateData.java
new file mode 100644
index 0000000..2fd49b4
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/LocalStateData.java
@@ -0,0 +1,454 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class LocalStateData implements org.apache.thrift.TBase<LocalStateData, LocalStateData._Fields>, java.io.Serializable, Cloneable, Comparable<LocalStateData> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("LocalStateData");
+
+ private static final org.apache.thrift.protocol.TField SERIALIZED_PARTS_FIELD_DESC = new org.apache.thrift.protocol.TField("serialized_parts", org.apache.thrift.protocol.TType.MAP, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new LocalStateDataStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new LocalStateDataTupleSchemeFactory());
+ }
+
+ private Map<String,ThriftSerializedObject> serialized_parts; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ SERIALIZED_PARTS((short)1, "serialized_parts");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // SERIALIZED_PARTS
+ return SERIALIZED_PARTS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.SERIALIZED_PARTS, new org.apache.thrift.meta_data.FieldMetaData("serialized_parts", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ThriftSerializedObject.class))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(LocalStateData.class, metaDataMap);
+ }
+
+ public LocalStateData() {
+ }
+
+ public LocalStateData(
+ Map<String,ThriftSerializedObject> serialized_parts)
+ {
+ this();
+ this.serialized_parts = serialized_parts;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public LocalStateData(LocalStateData other) {
+ if (other.is_set_serialized_parts()) {
+ Map<String,ThriftSerializedObject> __this__serialized_parts = new HashMap<String,ThriftSerializedObject>(other.serialized_parts.size());
+ for (Map.Entry<String, ThriftSerializedObject> other_element : other.serialized_parts.entrySet()) {
+
+ String other_element_key = other_element.getKey();
+ ThriftSerializedObject other_element_value = other_element.getValue();
+
+ String __this__serialized_parts_copy_key = other_element_key;
+
+ ThriftSerializedObject __this__serialized_parts_copy_value = new ThriftSerializedObject(other_element_value);
+
+ __this__serialized_parts.put(__this__serialized_parts_copy_key, __this__serialized_parts_copy_value);
+ }
+ this.serialized_parts = __this__serialized_parts;
+ }
+ }
+
+ public LocalStateData deepCopy() {
+ return new LocalStateData(this);
+ }
+
+ @Override
+ public void clear() {
+ this.serialized_parts = null;
+ }
+
+ public int get_serialized_parts_size() {
+ return (this.serialized_parts == null) ? 0 : this.serialized_parts.size();
+ }
+
+ public void put_to_serialized_parts(String key, ThriftSerializedObject val) {
+ if (this.serialized_parts == null) {
+ this.serialized_parts = new HashMap<String,ThriftSerializedObject>();
+ }
+ this.serialized_parts.put(key, val);
+ }
+
+ public Map<String,ThriftSerializedObject> get_serialized_parts() {
+ return this.serialized_parts;
+ }
+
+ public void set_serialized_parts(Map<String,ThriftSerializedObject> serialized_parts) {
+ this.serialized_parts = serialized_parts;
+ }
+
+ public void unset_serialized_parts() {
+ this.serialized_parts = null;
+ }
+
+ /** Returns true if field serialized_parts is set (has been assigned a value) and false otherwise */
+ public boolean is_set_serialized_parts() {
+ return this.serialized_parts != null;
+ }
+
+ public void set_serialized_parts_isSet(boolean value) {
+ if (!value) {
+ this.serialized_parts = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case SERIALIZED_PARTS:
+ if (value == null) {
+ unset_serialized_parts();
+ } else {
+ set_serialized_parts((Map<String,ThriftSerializedObject>)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case SERIALIZED_PARTS:
+ return get_serialized_parts();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case SERIALIZED_PARTS:
+ return is_set_serialized_parts();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof LocalStateData)
+ return this.equals((LocalStateData)that);
+ return false;
+ }
+
+ public boolean equals(LocalStateData that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_serialized_parts = true && this.is_set_serialized_parts();
+ boolean that_present_serialized_parts = true && that.is_set_serialized_parts();
+ if (this_present_serialized_parts || that_present_serialized_parts) {
+ if (!(this_present_serialized_parts && that_present_serialized_parts))
+ return false;
+ if (!this.serialized_parts.equals(that.serialized_parts))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_serialized_parts = true && (is_set_serialized_parts());
+ list.add(present_serialized_parts);
+ if (present_serialized_parts)
+ list.add(serialized_parts);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(LocalStateData other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_serialized_parts()).compareTo(other.is_set_serialized_parts());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_serialized_parts()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.serialized_parts, other.serialized_parts);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("LocalStateData(");
+ boolean first = true;
+
+ sb.append("serialized_parts:");
+ if (this.serialized_parts == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.serialized_parts);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_serialized_parts()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'serialized_parts' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class LocalStateDataStandardSchemeFactory implements SchemeFactory {
+ public LocalStateDataStandardScheme getScheme() {
+ return new LocalStateDataStandardScheme();
+ }
+ }
+
+ private static class LocalStateDataStandardScheme extends StandardScheme<LocalStateData> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, LocalStateData struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // SERIALIZED_PARTS
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map264 = iprot.readMapBegin();
+ struct.serialized_parts = new HashMap<String,ThriftSerializedObject>(2*_map264.size);
+ String _key265;
+ ThriftSerializedObject _val266;
+ for (int _i267 = 0; _i267 < _map264.size; ++_i267)
+ {
+ _key265 = iprot.readString();
+ _val266 = new ThriftSerializedObject();
+ _val266.read(iprot);
+ struct.serialized_parts.put(_key265, _val266);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_serialized_parts_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, LocalStateData struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.serialized_parts != null) {
+ oprot.writeFieldBegin(SERIALIZED_PARTS_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.serialized_parts.size()));
+ for (Map.Entry<String, ThriftSerializedObject> _iter268 : struct.serialized_parts.entrySet())
+ {
+ oprot.writeString(_iter268.getKey());
+ _iter268.getValue().write(oprot);
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class LocalStateDataTupleSchemeFactory implements SchemeFactory {
+ public LocalStateDataTupleScheme getScheme() {
+ return new LocalStateDataTupleScheme();
+ }
+ }
+
+ private static class LocalStateDataTupleScheme extends TupleScheme<LocalStateData> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, LocalStateData struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ {
+ oprot.writeI32(struct.serialized_parts.size());
+ for (Map.Entry<String, ThriftSerializedObject> _iter269 : struct.serialized_parts.entrySet())
+ {
+ oprot.writeString(_iter269.getKey());
+ _iter269.getValue().write(oprot);
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, LocalStateData struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ {
+ org.apache.thrift.protocol.TMap _map270 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.serialized_parts = new HashMap<String,ThriftSerializedObject>(2*_map270.size);
+ String _key271;
+ ThriftSerializedObject _val272;
+ for (int _i273 = 0; _i273 < _map270.size; ++_i273)
+ {
+ _key271 = iprot.readString();
+ _val272 = new ThriftSerializedObject();
+ _val272.read(iprot);
+ struct.serialized_parts.put(_key271, _val272);
+ }
+ }
+ struct.set_serialized_parts_isSet(true);
+ }
+ }
+
+}
+
[23/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/InprocMessaging.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/InprocMessaging.java b/jstorm-client/src/main/java/backtype/storm/utils/InprocMessaging.java
deleted file mode 100644
index c58c4da..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/InprocMessaging.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.LinkedBlockingQueue;
-
-public class InprocMessaging {
- private static Map<Integer, LinkedBlockingQueue<Object>> _queues = new HashMap<Integer, LinkedBlockingQueue<Object>>();
- private static final Object _lock = new Object();
- private static int port = 1;
-
- public static int acquireNewPort() {
- int ret;
- synchronized (_lock) {
- ret = port;
- port++;
- }
- return ret;
- }
-
- public static void sendMessage(int port, Object msg) {
- getQueue(port).add(msg);
- }
-
- public static Object takeMessage(int port) throws InterruptedException {
- return getQueue(port).take();
- }
-
- public static Object pollMessage(int port) {
- return getQueue(port).poll();
- }
-
- private static LinkedBlockingQueue<Object> getQueue(int port) {
- synchronized (_lock) {
- if (!_queues.containsKey(port)) {
- _queues.put(port, new LinkedBlockingQueue<Object>());
- }
- return _queues.get(port);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/KeyedRoundRobinQueue.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/KeyedRoundRobinQueue.java b/jstorm-client/src/main/java/backtype/storm/utils/KeyedRoundRobinQueue.java
deleted file mode 100644
index 8e14d1c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/KeyedRoundRobinQueue.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Queue;
-import java.util.concurrent.Semaphore;
-
-public class KeyedRoundRobinQueue<V> {
- private final Object _lock = new Object();
- private Semaphore _size = new Semaphore(0);
- private Map<Object, Queue<V>> _queues = new HashMap<Object, Queue<V>>();
- private List<Object> _keyOrder = new ArrayList<Object>();
- private int _currIndex = 0;
-
- public void add(Object key, V val) {
- synchronized (_lock) {
- Queue<V> queue = _queues.get(key);
- if (queue == null) {
- queue = new LinkedList<V>();
- _queues.put(key, queue);
- _keyOrder.add(key);
- }
- queue.add(val);
- }
- _size.release();
- }
-
- public V take() throws InterruptedException {
- _size.acquire();
- synchronized (_lock) {
- Object key = _keyOrder.get(_currIndex);
- Queue<V> queue = _queues.get(key);
- V ret = queue.remove();
- if (queue.isEmpty()) {
- _keyOrder.remove(_currIndex);
- _queues.remove(key);
- if (_keyOrder.size() == 0) {
- _currIndex = 0;
- } else {
- _currIndex = _currIndex % _keyOrder.size();
- }
- } else {
- _currIndex = (_currIndex + 1) % _keyOrder.size();
- }
- return ret;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ListDelegate.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ListDelegate.java b/jstorm-client/src/main/java/backtype/storm/utils/ListDelegate.java
deleted file mode 100644
index ac98847..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ListDelegate.java
+++ /dev/null
@@ -1,139 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.ListIterator;
-
-public class ListDelegate implements List<Object> {
- private List<Object> _delegate;
-
- public ListDelegate() {
- _delegate = new ArrayList<Object>();
- }
-
- public void setDelegate(List<Object> delegate) {
- _delegate = delegate;
- }
-
- public List<Object> getDelegate() {
- return _delegate;
- }
-
- @Override
- public int size() {
- return _delegate.size();
- }
-
- @Override
- public boolean isEmpty() {
- return _delegate.isEmpty();
- }
-
- @Override
- public boolean contains(Object o) {
- return _delegate.contains(o);
- }
-
- @Override
- public Iterator<Object> iterator() {
- return _delegate.iterator();
- }
-
- @Override
- public Object[] toArray() {
- return _delegate.toArray();
- }
-
- @Override
- public <T> T[] toArray(T[] ts) {
- return _delegate.toArray(ts);
- }
-
- @Override
- public boolean add(Object e) {
- return _delegate.add(e);
- }
-
- @Override
- public boolean remove(Object o) {
- return _delegate.remove(o);
- }
-
- @Override
- public boolean containsAll(Collection<?> clctn) {
- return _delegate.containsAll(clctn);
- }
-
- @Override
- public boolean addAll(Collection<? extends Object> clctn) {
- return _delegate.addAll(clctn);
- }
-
- @Override
- public boolean addAll(int i, Collection<? extends Object> clctn) {
- return _delegate.addAll(i, clctn);
- }
-
- @Override
- public boolean removeAll(Collection<?> clctn) {
- return _delegate.removeAll(clctn);
- }
-
- @Override
- public boolean retainAll(Collection<?> clctn) {
- return _delegate.retainAll(clctn);
- }
-
- @Override
- public void clear() {
- _delegate.clear();
- }
-
- @Override
- public Object get(int i) {
- return _delegate.get(i);
- }
-
- @Override
- public Object set(int i, Object e) {
- return _delegate.set(i, e);
- }
-
- @Override
- public void add(int i, Object e) {
- _delegate.add(i, e);
- }
-
- @Override
- public Object remove(int i) {
- return _delegate.remove(i);
- }
-
- @Override
- public int indexOf(Object o) {
- return _delegate.indexOf(o);
- }
-
- @Override
- public int lastIndexOf(Object o) {
- return _delegate.lastIndexOf(o);
- }
-
- @Override
- public ListIterator<Object> listIterator() {
- return _delegate.listIterator();
- }
-
- @Override
- public ListIterator<Object> listIterator(int i) {
- return _delegate.listIterator(i);
- }
-
- @Override
- public List<Object> subList(int i, int i1) {
- return _delegate.subList(i, i1);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/LocalState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/LocalState.java b/jstorm-client/src/main/java/backtype/storm/utils/LocalState.java
deleted file mode 100644
index 6be9534..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/LocalState.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package backtype.storm.utils;
-
-import org.apache.commons.io.FileUtils;
-
-import java.io.File;
-import java.util.Map;
-import java.util.HashMap;
-import java.io.IOException;
-
-/**
- * A simple, durable, atomic K/V database. *Very inefficient*, should only be
- * used for occasional reads/writes. Every read/write hits disk.
- */
-public class LocalState {
- private VersionedStore _vs;
-
- public LocalState(String backingDir) throws IOException {
- _vs = new VersionedStore(backingDir);
- }
-
- public synchronized Map<Object, Object> snapshot() throws IOException {
- int attempts = 0;
- while (true) {
- String latestPath = _vs.mostRecentVersionPath();
- if (latestPath == null)
- return new HashMap<Object, Object>();
- try {
- return (Map<Object, Object>) Utils.deserialize(FileUtils
- .readFileToByteArray(new File(latestPath)));
- } catch (IOException e) {
- attempts++;
- if (attempts >= 10) {
- throw e;
- }
- }
- }
- }
-
- public Object get(Object key) throws IOException {
- return snapshot().get(key);
- }
-
- public synchronized void put(Object key, Object val) throws IOException {
- put(key, val, true);
- }
-
- public synchronized void put(Object key, Object val, boolean cleanup)
- throws IOException {
- Map<Object, Object> curr = snapshot();
- curr.put(key, val);
- persist(curr, cleanup);
- }
-
- public synchronized void remove(Object key) throws IOException {
- remove(key, true);
- }
-
- public synchronized void remove(Object key, boolean cleanup)
- throws IOException {
- Map<Object, Object> curr = snapshot();
- curr.remove(key);
- persist(curr, cleanup);
- }
-
- public synchronized void cleanup(int keepVersions) throws IOException {
- _vs.cleanup(keepVersions);
- }
-
- private void persist(Map<Object, Object> val, boolean cleanup)
- throws IOException {
- byte[] toWrite = Utils.serialize(val);
- String newPath = _vs.createVersion();
- FileUtils.writeByteArrayToFile(new File(newPath), toWrite);
- _vs.succeedVersion(newPath);
- if (cleanup)
- _vs.cleanup(4);
- }
-}
\ No newline at end of file
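
The class comment above describes LocalState as a slow but durable key/value store in which every read and write goes to disk. A minimal usage sketch, assuming the removed backtype.storm.utils.LocalState shown above is still on the classpath; the directory path, keys and values are hypothetical:

import backtype.storm.utils.LocalState;

import java.util.Map;

public class LocalStateExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical backing directory; LocalState persists a versioned snapshot under it.
        LocalState state = new LocalState("/tmp/local-state-demo");
        state.put("worker-id", "worker-7");           // every put serializes the whole map to disk
        System.out.println(state.get("worker-id"));   // reads the most recent snapshot back from disk
        Map<Object, Object> all = state.snapshot();   // full current contents as a Map
        System.out.println(all);
        state.remove("worker-id");                    // rewrites the snapshot without the key
    }
}
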
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/MutableInt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/MutableInt.java b/jstorm-client/src/main/java/backtype/storm/utils/MutableInt.java
deleted file mode 100644
index 0f41f5c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/MutableInt.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package backtype.storm.utils;
-
-public class MutableInt {
- int val;
-
- public MutableInt(int val) {
- this.val = val;
- }
-
- public void set(int val) {
- this.val = val;
- }
-
- public int get() {
- return val;
- }
-
- public int increment() {
- return increment(1);
- }
-
- public int increment(int amt) {
- val += amt;
- return val;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/MutableLong.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/MutableLong.java b/jstorm-client/src/main/java/backtype/storm/utils/MutableLong.java
deleted file mode 100644
index 25b066f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/MutableLong.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package backtype.storm.utils;
-
-public class MutableLong {
- long val;
-
- public MutableLong(long val) {
- this.val = val;
- }
-
- public void set(long val) {
- this.val = val;
- }
-
- public long get() {
- return val;
- }
-
- public long increment() {
- return increment(1);
- }
-
- public long increment(long amt) {
- val += amt;
- return val;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/MutableObject.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/MutableObject.java b/jstorm-client/src/main/java/backtype/storm/utils/MutableObject.java
deleted file mode 100644
index 12e802b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/MutableObject.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package backtype.storm.utils;
-
-public class MutableObject {
- Object o = null;
-
- public MutableObject() {
-
- }
-
- public MutableObject(Object o) {
- this.o = o;
- }
-
- public void setObject(Object o) {
- this.o = o;
- }
-
- public Object getObject() {
- return o;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/NimbusClient.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/NimbusClient.java b/jstorm-client/src/main/java/backtype/storm/utils/NimbusClient.java
deleted file mode 100644
index 1d5ae3a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/NimbusClient.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.generated.Nimbus;
-import backtype.storm.security.auth.ThriftClient;
-
-public class NimbusClient extends ThriftClient {
- private Nimbus.Client _client;
- private static final Logger LOG = LoggerFactory
- .getLogger(NimbusClient.class);
-
- @SuppressWarnings("unchecked")
- public static NimbusClient getConfiguredClient(Map conf) {
- try {
- // String nimbusHost = (String) conf.get(Config.NIMBUS_HOST);
- // int nimbusPort =
- // Utils.getInt(conf.get(Config.NIMBUS_THRIFT_PORT));
- // return new NimbusClient(conf, nimbusHost, nimbusPort);
- return new NimbusClient(conf);
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- }
-
- private NimbusClient(Map conf) throws Exception {
- this(conf, null);
- }
-
- @SuppressWarnings("unchecked")
- private NimbusClient(Map conf, Integer timeout) throws Exception {
- super(conf, timeout);
- flush();
- }
-
- public Nimbus.Client getClient() {
- return _client;
- }
-
- @Override
- protected void flush() {
- // TODO Auto-generated method stub
- _client = new Nimbus.Client(_protocol);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/RegisteredGlobalState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/RegisteredGlobalState.java b/jstorm-client/src/main/java/backtype/storm/utils/RegisteredGlobalState.java
deleted file mode 100644
index 74bb749..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/RegisteredGlobalState.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.UUID;
-
-/**
- * This class is used as part of testing Storm. It is used to keep track of
- * "global metrics" in an atomic way. For example, it is used for doing
- * fine-grained detection of when a local Storm cluster is idle by tracking the
- * number of transferred tuples vs the number of processed tuples.
- */
-public class RegisteredGlobalState {
- private static HashMap<String, Object> _states = new HashMap<String, Object>();
- private static final Object _lock = new Object();
-
- public static Object globalLock() {
- return _lock;
- }
-
- public static String registerState(Object init) {
- synchronized (_lock) {
- String id = UUID.randomUUID().toString();
- _states.put(id, init);
- return id;
- }
- }
-
- public static void setState(String id, Object init) {
- synchronized (_lock) {
- _states.put(id, init);
- }
- }
-
- public static Object getState(String id) {
- synchronized (_lock) {
- Object ret = _states.get(id);
- // System.out.println("State: " + ret.toString());
- return ret;
- }
- }
-
- public static void clearState(String id) {
- synchronized (_lock) {
- _states.remove(id);
- }
- }
-}
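
The comment above explains that RegisteredGlobalState tracks test-only "global metrics" behind string ids. A short sketch of that pattern, using a hypothetical AtomicInteger as the shared state:

import backtype.storm.utils.RegisteredGlobalState;

import java.util.concurrent.atomic.AtomicInteger;

public class RegisteredGlobalStateExample {
    public static void main(String[] args) {
        // Register a counter and get back an id that can be passed around (e.g. into a topology).
        String id = RegisteredGlobalState.registerState(new AtomicInteger(0));
        AtomicInteger counter = (AtomicInteger) RegisteredGlobalState.getState(id);
        counter.incrementAndGet();                               // mutate the shared object in place
        System.out.println(RegisteredGlobalState.getState(id));  // prints 1
        RegisteredGlobalState.clearState(id);                    // drop the state when the test is done
    }
}
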
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/RotatingMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/RotatingMap.java b/jstorm-client/src/main/java/backtype/storm/utils/RotatingMap.java
deleted file mode 100644
index c96663e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/RotatingMap.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.log4j.Logger;
-
-/**
- * Expires keys that have not been updated in the configured number of seconds.
- * The algorithm used will take between expirationSecs and expirationSecs * (1 +
- * 1 / (numBuckets-1)) to actually expire the message.
- *
- * get, put, remove, containsKey, and size take O(numBuckets) time to run.
- *
- *
- * Please use com.alibaba.jstorm.utils.RotatingMap
- */
-@Deprecated
-public class RotatingMap<K, V> {
- // this default ensures things expire at most 50% past the expiration time
- private static final int DEFAULT_NUM_BUCKETS = 3;
-
- public static interface ExpiredCallback<K, V> {
- public void expire(K key, V val);
- }
-
- private LinkedList<HashMap<K, V>> _buckets;
-
- private ExpiredCallback _callback;
-
- public RotatingMap(int numBuckets, ExpiredCallback<K, V> callback) {
- if (numBuckets < 2) {
- throw new IllegalArgumentException("numBuckets must be >= 2");
- }
- _buckets = new LinkedList<HashMap<K, V>>();
- for (int i = 0; i < numBuckets; i++) {
- _buckets.add(new HashMap<K, V>());
- }
-
- _callback = callback;
- }
-
- public RotatingMap(ExpiredCallback<K, V> callback) {
- this(DEFAULT_NUM_BUCKETS, callback);
- }
-
- public RotatingMap(int numBuckets) {
- this(numBuckets, null);
- }
-
- public Map<K, V> rotate() {
- Map<K, V> dead = _buckets.removeLast();
- _buckets.addFirst(new HashMap<K, V>());
- if (_callback != null) {
- for (Entry<K, V> entry : dead.entrySet()) {
- _callback.expire(entry.getKey(), entry.getValue());
- }
- }
- return dead;
- }
-
- public boolean containsKey(K key) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return true;
- }
- }
- return false;
- }
-
- public V get(K key) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.get(key);
- }
- }
- return null;
- }
-
- public void put(K key, V value) {
- Iterator<HashMap<K, V>> it = _buckets.iterator();
- HashMap<K, V> bucket = it.next();
- bucket.put(key, value);
- while (it.hasNext()) {
- bucket = it.next();
- bucket.remove(key);
- }
- }
-
- public Object remove(K key) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.remove(key);
- }
- }
- return null;
- }
-
- public int size() {
- int size = 0;
- for (HashMap<K, V> bucket : _buckets) {
- size += bucket.size();
- }
- return size;
- }
-}
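
The javadoc above states that RotatingMap expires entries between expirationSecs and expirationSecs * (1 + 1/(numBuckets-1)), with the caller driving expiry by calling rotate(). A sketch of that contract (the class is deprecated in favor of com.alibaba.jstorm.utils.RotatingMap); the key and value are illustrative:

import backtype.storm.utils.RotatingMap;

import java.util.Map;

public class RotatingMapExample {
    public static void main(String[] args) {
        // Three buckets: an entry written to the newest bucket survives two rotations
        // and is expired on the third rotation after its last put.
        RotatingMap<String, Integer> pending = new RotatingMap<String, Integer>(3,
                new RotatingMap.ExpiredCallback<String, Integer>() {
                    public void expire(String key, Integer val) {
                        System.out.println("expired " + key + " -> " + val);
                    }
                });
        pending.put("tuple-1", 42);
        pending.rotate();                  // tuple-1 moves one bucket closer to expiry
        pending.put("tuple-1", 43);        // refreshing a key moves it back to the newest bucket
        pending.rotate();
        pending.rotate();
        Map<String, Integer> dead = pending.rotate(); // tuple-1 falls out; the callback fires
        System.out.println(dead);          // {tuple-1=43}
    }
}
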
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ServiceRegistry.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ServiceRegistry.java b/jstorm-client/src/main/java/backtype/storm/utils/ServiceRegistry.java
deleted file mode 100644
index 554d2d0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ServiceRegistry.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.UUID;
-
-// this class should be combined with RegisteredGlobalState
-public class ServiceRegistry {
- private static HashMap<String, Object> _services = new HashMap<String, Object>();
- private static final Object _lock = new Object();
-
- public static String registerService(Object service) {
- synchronized (_lock) {
- String id = UUID.randomUUID().toString();
- _services.put(id, service);
- return id;
- }
- }
-
- public static Object getService(String id) {
- synchronized (_lock) {
- return _services.get(id);
- }
- }
-
- public static void unregisterService(String id) {
- synchronized (_lock) {
- _services.remove(id);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ShellProcess.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ShellProcess.java b/jstorm-client/src/main/java/backtype/storm/utils/ShellProcess.java
deleted file mode 100644
index a847976..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ShellProcess.java
+++ /dev/null
@@ -1,182 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.utils;
-
-import backtype.storm.Config;
-import backtype.storm.multilang.ISerializer;
-import backtype.storm.multilang.BoltMsg;
-import backtype.storm.multilang.NoOutputException;
-import backtype.storm.multilang.ShellMsg;
-import backtype.storm.multilang.SpoutMsg;
-import backtype.storm.task.TopologyContext;
-import java.io.File;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Serializable;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.io.IOUtils;
-import org.apache.log4j.Logger;
-
-public class ShellProcess implements Serializable {
- public static Logger LOG = Logger.getLogger(ShellProcess.class);
- public static Logger ShellLogger;
- private Process _subprocess;
- private InputStream processErrorStream;
- private String[] command;
- public ISerializer serializer;
- public Number pid;
- public String componentName;
-
- public ShellProcess(String[] command) {
- this.command = command;
- }
-
- public Number launch(Map conf, TopologyContext context) {
- ProcessBuilder builder = new ProcessBuilder(command);
- builder.directory(new File(context.getCodeDir()));
-
- ShellLogger = Logger.getLogger(context.getThisComponentId());
-
- this.componentName = context.getThisComponentId();
- this.serializer = getSerializer(conf);
-
- try {
- _subprocess = builder.start();
- processErrorStream = _subprocess.getErrorStream();
- serializer.initialize(_subprocess.getOutputStream(), _subprocess.getInputStream());
- this.pid = serializer.connect(conf, context);
- } catch (IOException e) {
- throw new RuntimeException(
- "Error when launching multilang subprocess\n"
- + getErrorsString(), e);
- } catch (NoOutputException e) {
- throw new RuntimeException(e + getErrorsString() + "\n");
- }
- return this.pid;
- }
-
- private ISerializer getSerializer(Map conf) {
- //get factory class name
- String serializer_className = (String)conf.get(Config.TOPOLOGY_MULTILANG_SERIALIZER);
- LOG.info("Storm multilang serializer: " + serializer_className);
-
- ISerializer serializer = null;
- try {
- //create a factory class
- Class klass = Class.forName(serializer_className);
- //obtain a serializer object
- Object obj = klass.newInstance();
- serializer = (ISerializer)obj;
- } catch(Exception e) {
- throw new RuntimeException("Failed to construct multilang serializer from serializer " + serializer_className, e);
- }
- return serializer;
- }
-
- public void destroy() {
- _subprocess.destroy();
- }
-
- public ShellMsg readShellMsg() throws IOException {
- try {
- return serializer.readShellMsg();
- } catch (NoOutputException e) {
- throw new RuntimeException(e + getErrorsString() + "\n");
- }
- }
-
- public void writeBoltMsg(BoltMsg msg) throws IOException {
- serializer.writeBoltMsg(msg);
- // Log any info sent on the error stream
- logErrorStream();
- }
-
- public void writeSpoutMsg(SpoutMsg msg) throws IOException {
- serializer.writeSpoutMsg(msg);
- // Log any info sent on the error stream
- logErrorStream();
- }
-
- public void writeTaskIds(List<Integer> taskIds) throws IOException {
- serializer.writeTaskIds(taskIds);
- // Log any info sent on the error stream
- logErrorStream();
- }
-
- public void logErrorStream() {
- try {
- while (processErrorStream.available() > 0) {
- int bufferSize = processErrorStream.available();
- byte[] errorReadingBuffer = new byte[bufferSize];
- processErrorStream.read(errorReadingBuffer, 0, bufferSize);
- ShellLogger.info(new String(errorReadingBuffer));
- }
- } catch (Exception e) {
- }
- }
-
- public String getErrorsString() {
- if (processErrorStream != null) {
- try {
- return IOUtils.toString(processErrorStream);
- } catch (IOException e) {
- return "(Unable to capture error stream)";
- }
- } else {
- return "";
- }
- }
-
- /**
- *
- * @return pid, if the process has been launched, null otherwise.
- */
- public Number getPid() {
- return this.pid;
- }
-
- /**
- *
- * @return the name of component.
- */
- public String getComponentName() {
- return this.componentName;
- }
-
- /**
- *
- * @return exit code of the process if process is terminated, -1 if process is not started or terminated.
- */
- public int getExitCode() {
- try {
- return this._subprocess != null ? this._subprocess.exitValue() : -1;
- } catch(IllegalThreadStateException e) {
- return -1;
- }
- }
-
- public String getProcessInfoString() {
- return String.format("pid:%s, name:%s", pid, componentName);
- }
-
- public String getProcessTerminationInfoString() {
- return String.format(" exitCode:%s, errorString:%s ", getExitCode(), getErrorsString());
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ThreadResourceManager.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ThreadResourceManager.java b/jstorm-client/src/main/java/backtype/storm/utils/ThreadResourceManager.java
deleted file mode 100644
index 0971d0c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ThreadResourceManager.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.concurrent.ConcurrentLinkedQueue;
-
-public class ThreadResourceManager<T> {
- public static interface ResourceFactory<X> {
- X makeResource();
- }
-
- ResourceFactory<T> _factory;
- ConcurrentLinkedQueue<T> _resources = new ConcurrentLinkedQueue<T>();
-
- public ThreadResourceManager(ResourceFactory<T> factory) {
- _factory = factory;
- }
-
- public T acquire() {
- T ret = _resources.poll();
- if (ret == null) {
- ret = _factory.makeResource();
- }
- return ret;
- }
-
- public void release(T resource) {
- _resources.add(resource);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ThriftTopologyUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ThriftTopologyUtils.java b/jstorm-client/src/main/java/backtype/storm/utils/ThriftTopologyUtils.java
deleted file mode 100644
index 1b52fe3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ThriftTopologyUtils.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-import backtype.storm.generated.Bolt;
-import backtype.storm.generated.ComponentCommon;
-import backtype.storm.generated.SpoutSpec;
-import backtype.storm.generated.StateSpoutSpec;
-import backtype.storm.generated.StormTopology;
-
-public class ThriftTopologyUtils {
- public static Set<String> getComponentIds(StormTopology topology) {
- Set<String> ret = new HashSet<String>();
- for (StormTopology._Fields f : StormTopology.metaDataMap.keySet()) {
- Map<String, Object> componentMap = (Map<String, Object>) topology
- .getFieldValue(f);
- ret.addAll(componentMap.keySet());
- }
- return ret;
- }
-
- public static Map<String, Object> getComponents(StormTopology topology) {
- Map<String, Object> ret = new HashMap<String, Object>();
- for (StormTopology._Fields f : StormTopology.metaDataMap.keySet()) {
- Map<String, Object> componentMap = (Map<String, Object>) topology
- .getFieldValue(f);
- ret.putAll(componentMap);
- }
- return ret;
- }
-
- public static ComponentCommon getComponentCommon(StormTopology topology,
- String componentId) {
- for (StormTopology._Fields f : StormTopology.metaDataMap.keySet()) {
- Map<String, Object> componentMap = (Map<String, Object>) topology
- .getFieldValue(f);
- if (componentMap.containsKey(componentId)) {
- Object component = componentMap.get(componentId);
- if (component instanceof Bolt) {
- return ((Bolt) component).get_common();
- }
- if (component instanceof SpoutSpec) {
- return ((SpoutSpec) component).get_common();
- }
- if (component instanceof StateSpoutSpec) {
- return ((StateSpoutSpec) component).get_common();
- }
- throw new RuntimeException(
- "Unreachable code! No get_common conversion for component "
- + component);
- }
- }
- throw new IllegalArgumentException(
- "Could not find component common for " + componentId);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/Time.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/Time.java b/jstorm-client/src/main/java/backtype/storm/utils/Time.java
deleted file mode 100644
index e791dc4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/Time.java
+++ /dev/null
@@ -1,93 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.concurrent.atomic.AtomicLong;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.atomic.AtomicBoolean;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class Time {
- public static Logger LOG = LoggerFactory.getLogger(Time.class);
-
- private static AtomicBoolean simulating = new AtomicBoolean(false);
- // TODO: should probably use weak references here or something
- private static volatile Map<Thread, AtomicLong> threadSleepTimes;
- private static final Object sleepTimesLock = new Object();
-
- private static AtomicLong simulatedCurrTimeMs; // should this be a thread
- // local that's allowed to
- // keep advancing?
-
- public static void startSimulating() {
- simulating.set(true);
- simulatedCurrTimeMs = new AtomicLong(0);
- threadSleepTimes = new ConcurrentHashMap<Thread, AtomicLong>();
- }
-
- public static void stopSimulating() {
- simulating.set(false);
- threadSleepTimes = null;
- }
-
- public static boolean isSimulating() {
- return simulating.get();
- }
-
- public static void sleepUntil(long targetTimeMs)
- throws InterruptedException {
- if (simulating.get()) {
- try {
- synchronized (sleepTimesLock) {
- threadSleepTimes.put(Thread.currentThread(),
- new AtomicLong(targetTimeMs));
- }
- while (simulatedCurrTimeMs.get() < targetTimeMs) {
- Thread.sleep(10);
- }
- } finally {
- synchronized (sleepTimesLock) {
- threadSleepTimes.remove(Thread.currentThread());
- }
- }
- } else {
- long sleepTime = targetTimeMs - currentTimeMillis();
- if (sleepTime > 0)
- Thread.sleep(sleepTime);
- }
- }
-
- public static void sleep(long ms) throws InterruptedException {
- sleepUntil(currentTimeMillis() + ms);
- }
-
- public static long currentTimeMillis() {
- if (simulating.get()) {
- return simulatedCurrTimeMs.get();
- } else {
- return System.currentTimeMillis();
- }
- }
-
- public static int currentTimeSecs() {
- return (int) (currentTimeMillis() / 1000);
- }
-
- public static void advanceTime(long ms) {
- if (!simulating.get())
- throw new IllegalStateException(
- "Cannot simulate time unless in simulation mode");
- simulatedCurrTimeMs.set(simulatedCurrTimeMs.get() + ms);
- }
-
- public static boolean isThreadWaiting(Thread t) {
- if (!simulating.get())
- throw new IllegalStateException("Must be in simulation mode");
- AtomicLong time;
- synchronized (sleepTimesLock) {
- time = threadSleepTimes.get(t);
- }
- return !t.isAlive() || time != null
- && currentTimeMillis() < time.longValue();
- }
-}
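
Time above has a simulation mode: startSimulating() switches to a virtual clock starting at 0 ms, advanceTime() moves it forward, and currentTimeMillis()/currentTimeSecs() read whichever clock is active. A minimal single-threaded sketch (sleep() would block under simulation unless another thread advances the clock, so it is not used here):

import backtype.storm.utils.Time;

public class SimulatedTimeExample {
    public static void main(String[] args) {
        Time.startSimulating();                        // virtual clock starts at 0 ms
        System.out.println(Time.currentTimeMillis());  // 0
        Time.advanceTime(5000);                        // jump the simulated clock forward 5 s
        System.out.println(Time.currentTimeSecs());    // 5
        Time.stopSimulating();                         // back to System.currentTimeMillis()
    }
}
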
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/TimeCacheMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/TimeCacheMap.java b/jstorm-client/src/main/java/backtype/storm/utils/TimeCacheMap.java
deleted file mode 100644
index a1052c4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/TimeCacheMap.java
+++ /dev/null
@@ -1,143 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.LinkedList;
-import java.util.Map;
-import java.util.Map.Entry;
-
-/**
- * Expires keys that have not been updated in the configured number of seconds.
- * The algorithm used will take between expirationSecs and expirationSecs * (1 +
- * 1 / (numBuckets-1)) to actually expire the message.
- *
- * get, put, remove, containsKey, and size take O(numBuckets) time to run.
- *
- * Please use com.alibaba.jstorm.utils.TimeCacheMap
- */
-@Deprecated
-public class TimeCacheMap<K, V> {
- // this default ensures things expire at most 50% past the expiration time
- private static final int DEFAULT_NUM_BUCKETS = 3;
-
- @Deprecated
- public static interface ExpiredCallback<K, V> {
- public void expire(K key, V val);
- }
-
- private LinkedList<HashMap<K, V>> _buckets;
-
- private final Object _lock = new Object();
- private Thread _cleaner;
- private ExpiredCallback _callback;
-
- public TimeCacheMap(int expirationSecs, int numBuckets,
- ExpiredCallback<K, V> callback) {
- if (numBuckets < 2) {
- throw new IllegalArgumentException("numBuckets must be >= 2");
- }
- _buckets = new LinkedList<HashMap<K, V>>();
- for (int i = 0; i < numBuckets; i++) {
- _buckets.add(new HashMap<K, V>());
- }
-
- _callback = callback;
- final long expirationMillis = expirationSecs * 1000L;
- final long sleepTime = expirationMillis / (numBuckets - 1);
- _cleaner = new Thread(new Runnable() {
- public void run() {
- try {
- while (true) {
- Map<K, V> dead = null;
- Time.sleep(sleepTime);
- synchronized (_lock) {
- dead = _buckets.removeLast();
- _buckets.addFirst(new HashMap<K, V>());
- }
- if (_callback != null) {
- for (Entry<K, V> entry : dead.entrySet()) {
- _callback.expire(entry.getKey(),
- entry.getValue());
- }
- }
- }
- } catch (InterruptedException ex) {
-
- }
- }
- });
- _cleaner.setDaemon(true);
- _cleaner.start();
- }
-
- public TimeCacheMap(int expirationSecs, ExpiredCallback<K, V> callback) {
- this(expirationSecs, DEFAULT_NUM_BUCKETS, callback);
- }
-
- public TimeCacheMap(int expirationSecs) {
- this(expirationSecs, DEFAULT_NUM_BUCKETS);
- }
-
- public TimeCacheMap(int expirationSecs, int numBuckets) {
- this(expirationSecs, numBuckets, null);
- }
-
- public boolean containsKey(K key) {
- synchronized (_lock) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return true;
- }
- }
- return false;
- }
- }
-
- public V get(K key) {
- synchronized (_lock) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.get(key);
- }
- }
- return null;
- }
- }
-
- public void put(K key, V value) {
- synchronized (_lock) {
- Iterator<HashMap<K, V>> it = _buckets.iterator();
- HashMap<K, V> bucket = it.next();
- bucket.put(key, value);
- while (it.hasNext()) {
- bucket = it.next();
- bucket.remove(key);
- }
- }
- }
-
- public Object remove(K key) {
- synchronized (_lock) {
- for (HashMap<K, V> bucket : _buckets) {
- if (bucket.containsKey(key)) {
- return bucket.remove(key);
- }
- }
- return null;
- }
- }
-
- public int size() {
- synchronized (_lock) {
- int size = 0;
- for (HashMap<K, V> bucket : _buckets) {
- size += bucket.size();
- }
- return size;
- }
- }
-
- public void cleanup() {
- _cleaner.interrupt();
- }
-}
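
TimeCacheMap above behaves like RotatingMap but runs its own daemon cleaner thread, so entries expire between expirationSecs and roughly 1.5x that with the default three buckets. A small sketch (the class is deprecated in favor of com.alibaba.jstorm.utils.TimeCacheMap); the key and value are illustrative:

import backtype.storm.utils.TimeCacheMap;

public class TimeCacheMapExample {
    public static void main(String[] args) throws InterruptedException {
        // expirationSecs = 1 with the default 3 buckets: entries live roughly 1.0 - 1.5 s after their last put.
        TimeCacheMap<String, String> cache = new TimeCacheMap<String, String>(1,
                new TimeCacheMap.ExpiredCallback<String, String>() {
                    public void expire(String key, String val) {
                        System.out.println("expired " + key);
                    }
                });
        cache.put("session", "abc");
        System.out.println(cache.get("session"));  // abc
        Thread.sleep(2000);                        // wait past the expiration window
        System.out.println(cache.get("session"));  // null once the cleaner thread has rotated it out
        cache.cleanup();                           // interrupt the background cleaner thread
    }
}
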
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/TupleHelpers.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/TupleHelpers.java b/jstorm-client/src/main/java/backtype/storm/utils/TupleHelpers.java
deleted file mode 100644
index 2f8b5cb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/TupleHelpers.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package backtype.storm.utils;
-
-import backtype.storm.Constants;
-import backtype.storm.tuple.Tuple;
-
-public class TupleHelpers {
- private TupleHelpers() {
-
- }
-
- public static boolean isTickTuple(Tuple tuple) {
- return tuple.getSourceComponent().equals(Constants.SYSTEM_COMPONENT_ID)
- && tuple.getSourceStreamId().equals(
- Constants.SYSTEM_TICK_STREAM_ID);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/Utils.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/Utils.java b/jstorm-client/src/main/java/backtype/storm/utils/Utils.java
deleted file mode 100644
index b168a50..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/Utils.java
+++ /dev/null
@@ -1,592 +0,0 @@
-package backtype.storm.utils;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.FileOutputStream;
-import java.io.FileReader;
-import java.io.IOException;
-import java.io.InputStreamReader;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.nio.ByteBuffer;
-import java.nio.channels.Channels;
-import java.nio.channels.WritableByteChannel;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TreeMap;
-import java.util.UUID;
-
-import org.apache.commons.io.input.ClassLoaderObjectInputStream;
-import org.apache.commons.lang.StringUtils;
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.curator.framework.CuratorFrameworkFactory;
-import org.apache.curator.retry.ExponentialBackoffRetry;
-import org.apache.thrift7.TException;
-import org.json.simple.JSONValue;
-import org.yaml.snakeyaml.Yaml;
-
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-
-import backtype.storm.Config;
-import backtype.storm.generated.ComponentCommon;
-import backtype.storm.generated.ComponentObject;
-import backtype.storm.generated.StormTopology;
-import clojure.lang.IFn;
-import clojure.lang.RT;
-
-public class Utils {
- public static final String DEFAULT_STREAM_ID = "default";
-
- public static Object newInstance(String klass) {
- try {
- Class c = Class.forName(klass);
- return c.newInstance();
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public static byte[] serialize(Object obj) {
- try {
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- ObjectOutputStream oos = new ObjectOutputStream(bos);
- oos.writeObject(obj);
- oos.close();
- return bos.toByteArray();
- } catch (IOException ioe) {
- throw new RuntimeException(ioe);
- }
- }
-
- public static Object deserialize(byte[] serialized, URLClassLoader loader) {
- try {
- ByteArrayInputStream bis = new ByteArrayInputStream(serialized);
- Object ret = null;
- if (loader != null) {
- ClassLoaderObjectInputStream cis = new ClassLoaderObjectInputStream(
- loader, bis);
- ret = cis.readObject();
- cis.close();
- } else {
- ObjectInputStream ois = new ObjectInputStream(bis);
- ret = ois.readObject();
- ois.close();
- }
- return ret;
- } catch (IOException ioe) {
- throw new RuntimeException(ioe);
- } catch (ClassNotFoundException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static Object deserialize(byte[] serialized) {
- return deserialize(serialized, WorkerClassLoader.getInstance());
- }
-
- public static String to_json(Object m) {
- //return JSON.toJSONString(m);
- return JSONValue.toJSONString(m);
- }
-
- public static Object from_json(String json) {
- if (json == null) {
- return null;
- } else {
- //return JSON.parse(json);
- return JSONValue.parse(json);
- }
- }
-
- public static String toPrettyJsonString(Object obj) {
- Gson gson2 = new GsonBuilder().setPrettyPrinting().create();
- String ret = gson2.toJson(obj);
-
- return ret;
- }
-
- public static <T> String join(Iterable<T> coll, String sep) {
- Iterator<T> it = coll.iterator();
- String ret = "";
- while (it.hasNext()) {
- ret = ret + it.next();
- if (it.hasNext()) {
- ret = ret + sep;
- }
- }
- return ret;
- }
-
- public static void sleep(long millis) {
- try {
- Time.sleep(millis);
- } catch (InterruptedException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static List<URL> findResources(String name) {
- try {
- Enumeration<URL> resources = Thread.currentThread()
- .getContextClassLoader().getResources(name);
- List<URL> ret = new ArrayList<URL>();
- while (resources.hasMoreElements()) {
- ret.add(resources.nextElement());
- }
- return ret;
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static Map findAndReadConfigFile(String name, boolean mustExist) {
- try {
- HashSet<URL> resources = new HashSet<URL>(findResources(name));
- if (resources.isEmpty()) {
- if (mustExist)
- throw new RuntimeException(
- "Could not find config file on classpath " + name);
- else
- return new HashMap();
- }
- if (resources.size() > 1) {
- throw new RuntimeException(
- "Found multiple "
- + name
- + " resources. You're probably bundling the Storm jars with your topology jar. "
- + resources);
- }
- URL resource = resources.iterator().next();
- Yaml yaml = new Yaml();
- Map ret = (Map) yaml.load(new InputStreamReader(resource
- .openStream()));
- if (ret == null)
- ret = new HashMap();
-
- return new HashMap(ret);
-
- } catch (IOException e) {
- throw new RuntimeException(e);
- }
- }
-
- public static Map findAndReadConfigFile(String name) {
- return findAndReadConfigFile(name, true);
- }
-
- public static Map readDefaultConfig() {
- return findAndReadConfigFile("defaults.yaml", true);
- }
-
- public static Map readCommandLineOpts() {
- Map ret = new HashMap();
- String commandOptions = System.getProperty("storm.options");
- if (commandOptions != null) {
- commandOptions = commandOptions.replaceAll("%%%%", " ");
- String[] configs = commandOptions.split(",");
- for (String config : configs) {
- String[] options = config.split("=");
- if (options.length == 2) {
- ret.put(options[0], options[1]);
- }
- }
- }
- return ret;
- }
-
- public static void replaceLocalDir(Map<Object, Object> conf) {
- String stormHome = System.getProperty("jstorm.home");
- boolean isEmpty = StringUtils.isBlank(stormHome);
-
- Map<Object, Object> replaceMap = new HashMap<Object, Object>();
-
- for (Entry entry : conf.entrySet()) {
- Object key = entry.getKey();
- Object value = entry.getValue();
-
- if (value instanceof String) {
- if (StringUtils.isBlank((String)value) == true ) {
- continue;
- }
-
- String str = (String)value;
- if (isEmpty == true) {
- // replace %JSTORM_HOME% as current directory
- str = str.replace("%JSTORM_HOME%", ".");
- }else {
- str = str.replace("%JSTORM_HOME%", stormHome);
- }
-
-
- replaceMap.put(key, str);
- }
- }
-
-
- conf.putAll(replaceMap);
- }
-
- public static Map loadDefinedConf(String confFile) {
- File file = new File(confFile);
- if (file.exists() == false) {
- return findAndReadConfigFile(confFile, true);
- }
-
- Yaml yaml = new Yaml();
- Map ret;
- try {
- ret = (Map) yaml.load(new FileReader(file));
- } catch (FileNotFoundException e) {
- ret = null;
- }
- if (ret == null)
- ret = new HashMap();
-
- return new HashMap(ret);
- }
-
- public static Map readStormConfig() {
- Map ret = readDefaultConfig();
- String confFile = System.getProperty("storm.conf.file");
- Map storm;
- if (StringUtils.isBlank(confFile) == true) {
- storm = findAndReadConfigFile("storm.yaml", false);
- } else {
- storm = loadDefinedConf(confFile);
- }
- ret.putAll(storm);
- ret.putAll(readCommandLineOpts());
-
- replaceLocalDir(ret);
- return ret;
- }
-
- private static Object normalizeConf(Object conf) {
- if (conf == null)
- return new HashMap();
- if (conf instanceof Map) {
- Map confMap = new HashMap((Map) conf);
- for (Object key : confMap.keySet()) {
- Object val = confMap.get(key);
- confMap.put(key, normalizeConf(val));
- }
- return confMap;
- } else if (conf instanceof List) {
- List confList = new ArrayList((List) conf);
- for (int i = 0; i < confList.size(); i++) {
- Object val = confList.get(i);
- confList.set(i, normalizeConf(val));
- }
- return confList;
- } else if (conf instanceof Integer) {
- return ((Integer) conf).longValue();
- } else if (conf instanceof Float) {
- return ((Float) conf).doubleValue();
- } else {
- return conf;
- }
- }
-
-
-
- public static boolean isValidConf(Map<String, Object> stormConf) {
- return normalizeConf(stormConf).equals(
- normalizeConf(Utils.from_json(Utils.to_json(stormConf))));
- }
-
- public static Object getSetComponentObject(ComponentObject obj,
- URLClassLoader loader) {
- if (obj.getSetField() == ComponentObject._Fields.SERIALIZED_JAVA) {
- return Utils.deserialize(obj.get_serialized_java(), loader);
- } else if (obj.getSetField() == ComponentObject._Fields.JAVA_OBJECT) {
- return obj.get_java_object();
- } else {
- return obj.get_shell();
- }
- }
-
- public static <S, T> T get(Map<S, T> m, S key, T def) {
- T ret = m.get(key);
- if (ret == null) {
- ret = def;
- }
- return ret;
- }
-
- public static List<Object> tuple(Object... values) {
- List<Object> ret = new ArrayList<Object>();
- for (Object v : values) {
- ret.add(v);
- }
- return ret;
- }
-
- public static void downloadFromMaster(Map conf, String file,
- String localFile) throws IOException, TException {
- WritableByteChannel out = null;
- NimbusClient client = null;
- try {
- client = NimbusClient.getConfiguredClient(conf);
- String id = client.getClient().beginFileDownload(file);
- out = Channels.newChannel(new FileOutputStream(localFile));
- while (true) {
- ByteBuffer chunk = client.getClient().downloadChunk(id);
- int written = out.write(chunk);
- if (written == 0)
- break;
- }
- } finally {
- if (out != null)
- out.close();
- if (client != null)
- client.close();
- }
- }
-
- public static IFn loadClojureFn(String namespace, String name) {
- try {
- clojure.lang.Compiler.eval(RT.readString("(require '" + namespace
- + ")"));
- } catch (Exception e) {
- // if playing from the repl and defining functions, file won't exist
- }
- return (IFn) RT.var(namespace, name).deref();
- }
-
- public static boolean isSystemId(String id) {
- return id.startsWith("__");
- }
-
- public static <K, V> Map<V, K> reverseMap(Map<K, V> map) {
- Map<V, K> ret = new HashMap<V, K>();
- for (K key : map.keySet()) {
- ret.put(map.get(key), key);
- }
- return ret;
- }
-
- public static ComponentCommon getComponentCommon(StormTopology topology,
- String id) {
- if (topology.get_spouts().containsKey(id)) {
- return topology.get_spouts().get(id).get_common();
- }
- if (topology.get_bolts().containsKey(id)) {
- return topology.get_bolts().get(id).get_common();
- }
- if (topology.get_state_spouts().containsKey(id)) {
- return topology.get_state_spouts().get(id).get_common();
- }
- throw new IllegalArgumentException("Could not find component with id "
- + id);
- }
-
- public static Integer getInt(Object o) {
- if (o instanceof Long) {
- return ((Long) o).intValue();
- } else if (o instanceof Integer) {
- return (Integer) o;
- } else if (o instanceof Short) {
- return ((Short) o).intValue();
- } else if (o instanceof String) {
- return Integer.parseInt(((String) o));
- } else {
- throw new IllegalArgumentException("Don't know how to convert " + o
- + " to int");
- }
- }
-
- public static Integer getInt(Object o, Integer defaultValue) {
- if (null == o) {
- return defaultValue;
- }
-
- if(o instanceof Long) {
- return ((Long) o ).intValue();
- } else if (o instanceof Integer) {
- return (Integer) o;
- } else if (o instanceof Short) {
- return ((Short) o).intValue();
- } else if (o instanceof String) {
- return Integer.parseInt(((String) o));
- } else {
- throw new IllegalArgumentException("Don't know how to convert " + o + " to int");
- }
- }
-
- public static long secureRandomLong() {
- return UUID.randomUUID().getLeastSignificantBits();
- }
-
- public static CuratorFramework newCurator(Map conf, List<String> servers,
- Object port, String root) {
- return newCurator(conf, servers, port, root, null);
- }
-
- public static class BoundedExponentialBackoffRetry extends
- ExponentialBackoffRetry {
-
- protected final int maxRetryInterval;
-
- public BoundedExponentialBackoffRetry(int baseSleepTimeMs,
- int maxRetries, int maxSleepTimeMs) {
- super(baseSleepTimeMs, maxRetries);
- this.maxRetryInterval = maxSleepTimeMs;
- }
-
- public int getMaxRetryInterval() {
- return this.maxRetryInterval;
- }
-
- @Override
- public int getSleepTimeMs(int count, long elapsedMs) {
- return Math.min(maxRetryInterval,
- super.getSleepTimeMs(count, elapsedMs));
- }
-
- }
-
- public static CuratorFramework newCurator(Map conf, List<String> servers,
- Object port, String root, ZookeeperAuthInfo auth) {
- List<String> serverPorts = new ArrayList<String>();
- for (String zkServer : (List<String>) servers) {
- serverPorts.add(zkServer + ":" + Utils.getInt(port));
- }
- String zkStr = StringUtils.join(serverPorts, ",") + root;
- CuratorFrameworkFactory.Builder builder = CuratorFrameworkFactory
- .builder()
- .connectString(zkStr)
- .connectionTimeoutMs(
- Utils.getInt(conf
- .get(Config.STORM_ZOOKEEPER_CONNECTION_TIMEOUT)))
- .sessionTimeoutMs(
- Utils.getInt(conf
- .get(Config.STORM_ZOOKEEPER_SESSION_TIMEOUT)))
- .retryPolicy(
- new BoundedExponentialBackoffRetry(
- Utils.getInt(conf
- .get(Config.STORM_ZOOKEEPER_RETRY_INTERVAL)),
- Utils.getInt(conf
- .get(Config.STORM_ZOOKEEPER_RETRY_TIMES)),
- Utils.getInt(conf
- .get(Config.STORM_ZOOKEEPER_RETRY_INTERVAL_CEILING))));
- if (auth != null && auth.scheme != null) {
- builder = builder.authorization(auth.scheme, auth.payload);
- }
- return builder.build();
- }
-
- public static CuratorFramework newCurator(Map conf, List<String> servers,
- Object port) {
- return newCurator(conf, servers, port, "");
- }
-
- public static CuratorFramework newCuratorStarted(Map conf,
- List<String> servers, Object port, String root) {
- CuratorFramework ret = newCurator(conf, servers, port, root);
- ret.start();
- return ret;
- }
-
- public static CuratorFramework newCuratorStarted(Map conf,
- List<String> servers, Object port) {
- CuratorFramework ret = newCurator(conf, servers, port);
- ret.start();
- return ret;
- }
-
- /**
- *
- (defn integer-divided [sum num-pieces] (let [base (int (/ sum
- * num-pieces)) num-inc (mod sum num-pieces) num-bases (- num-pieces
- * num-inc)] (if (= num-inc 0) {base num-bases} {base num-bases (inc base)
- * num-inc} )))
- *
- * @param sum
- * @param numPieces
- * @return
- */
-
- public static TreeMap<Integer, Integer> integerDivided(int sum,
- int numPieces) {
- int base = sum / numPieces;
- int numInc = sum % numPieces;
- int numBases = numPieces - numInc;
- TreeMap<Integer, Integer> ret = new TreeMap<Integer, Integer>();
- ret.put(base, numBases);
- if (numInc != 0) {
- ret.put(base + 1, numInc);
- }
- return ret;
- }
-
- public static byte[] toByteArray(ByteBuffer buffer) {
- byte[] ret = new byte[buffer.remaining()];
- buffer.get(ret, 0, ret.length);
- return ret;
- }
-
- public static boolean exceptionCauseIsInstanceOf(Class klass,
- Throwable throwable) {
- Throwable t = throwable;
- while (t != null) {
- if (klass.isInstance(t)) {
- return true;
- }
- t = t.getCause();
- }
- return false;
- }
-
- public static List<String> tokenize_path(String path) {
- String[] toks = path.split("/");
- java.util.ArrayList<String> rtn = new ArrayList<String>();
- for (String str : toks) {
- if (!str.isEmpty()) {
- rtn.add(str);
- }
- }
- return rtn;
- }
-
- public static String toks_to_path(List<String> toks) {
- StringBuffer buff = new StringBuffer();
- buff.append("/");
- int size = toks.size();
- for (int i = 0; i < size; i++) {
- buff.append(toks.get(i));
- if (i < (size - 1)) {
- buff.append("/");
- }
-
- }
- return buff.toString();
- }
-
- public static String normalize_path(String path) {
- String rtn = toks_to_path(tokenize_path(path));
- return rtn;
- }
-
- public static String printStack() {
- StringBuilder sb = new StringBuilder();
- sb.append("\nCurrent call stack:\n");
- StackTraceElement[] stackElements= Thread.currentThread().getStackTrace();
- for (int i = 2; i < stackElements.length; i++) {
- sb.append("\t").append(stackElements[i]).append("\n");
- }
-
- return sb.toString();
- }
-
-}
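
One worked example of the integerDivided helper defined above (the Clojure snippet in its javadoc describes the same split): dividing 10 into 3 pieces yields two pieces of size 3 and one piece of size 4. A hypothetical caller:

import backtype.storm.utils.Utils;

import java.util.TreeMap;

public class IntegerDividedExample {
    public static void main(String[] args) {
        // base = 10 / 3 = 3, remainder = 1, so the result maps piece-size to piece-count.
        TreeMap<Integer, Integer> split = Utils.integerDivided(10, 3);
        System.out.println(split); // {3=2, 4=1}
    }
}
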
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/VersionedStore.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/VersionedStore.java b/jstorm-client/src/main/java/backtype/storm/utils/VersionedStore.java
deleted file mode 100644
index cd9e750..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/VersionedStore.java
+++ /dev/null
@@ -1,178 +0,0 @@
-package backtype.storm.utils;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.io.File;
-
-import org.apache.commons.io.FileUtils;
-
-public class VersionedStore {
- private static final String FINISHED_VERSION_SUFFIX = ".version";
-
- private String _root;
-
- public VersionedStore(String path) throws IOException {
- _root = path;
- mkdirs(_root);
- }
-
- public String getRoot() {
- return _root;
- }
-
- public String versionPath(long version) {
- return new File(_root, "" + version).getAbsolutePath();
- }
-
- public String mostRecentVersionPath() throws IOException {
- Long v = mostRecentVersion();
- if (v == null)
- return null;
- return versionPath(v);
- }
-
- public String mostRecentVersionPath(long maxVersion) throws IOException {
- Long v = mostRecentVersion(maxVersion);
- if (v == null)
- return null;
- return versionPath(v);
- }
-
- public Long mostRecentVersion() throws IOException {
- List<Long> all = getAllVersions();
- if (all.size() == 0)
- return null;
- return all.get(0);
- }
-
- public Long mostRecentVersion(long maxVersion) throws IOException {
- List<Long> all = getAllVersions();
- for (Long v : all) {
- if (v <= maxVersion)
- return v;
- }
- return null;
- }
-
- public String createVersion() throws IOException {
- Long mostRecent = mostRecentVersion();
- long version = Time.currentTimeMillis();
- if (mostRecent != null && version <= mostRecent) {
- version = mostRecent + 1;
- }
- return createVersion(version);
- }
-
- public String createVersion(long version) throws IOException {
- String ret = versionPath(version);
- if (getAllVersions().contains(version))
- throw new RuntimeException(
- "Version already exists or data already exists");
- else
- return ret;
- }
-
- public void failVersion(String path) throws IOException {
- deleteVersion(validateAndGetVersion(path));
- }
-
- public void deleteVersion(long version) throws IOException {
- File versionFile = new File(versionPath(version));
- File tokenFile = new File(tokenPath(version));
-
- if (versionFile.exists()) {
- FileUtils.forceDelete(versionFile);
- }
- if (tokenFile.exists()) {
- FileUtils.forceDelete(tokenFile);
- }
- }
-
- public void succeedVersion(String path) throws IOException {
- long version = validateAndGetVersion(path);
- // should rewrite this to do a file move
- createNewFile(tokenPath(version));
- }
-
- public void cleanup() throws IOException {
- cleanup(-1);
- }
-
- public void cleanup(int versionsToKeep) throws IOException {
- List<Long> versions = getAllVersions();
- if (versionsToKeep >= 0) {
- versions = versions.subList(0,
- Math.min(versions.size(), versionsToKeep));
- }
- HashSet<Long> keepers = new HashSet<Long>(versions);
-
- for (String p : listDir(_root)) {
- Long v = parseVersion(p);
- if (v != null && !keepers.contains(v)) {
- deleteVersion(v);
- }
- }
- }
-
- /**
- * Sorted from most recent to oldest
- */
- public List<Long> getAllVersions() throws IOException {
- List<Long> ret = new ArrayList<Long>();
- for (String s : listDir(_root)) {
- if (s.endsWith(FINISHED_VERSION_SUFFIX)) {
- ret.add(validateAndGetVersion(s));
- }
- }
- Collections.sort(ret);
- Collections.reverse(ret);
- return ret;
- }
-
- private String tokenPath(long version) {
- return new File(_root, "" + version + FINISHED_VERSION_SUFFIX)
- .getAbsolutePath();
- }
-
- private long validateAndGetVersion(String path) {
- Long v = parseVersion(path);
- if (v == null)
- throw new RuntimeException(path + " is not a valid version");
- return v;
- }
-
- private Long parseVersion(String path) {
- String name = new File(path).getName();
- if (name.endsWith(FINISHED_VERSION_SUFFIX)) {
- name = name.substring(0,
- name.length() - FINISHED_VERSION_SUFFIX.length());
- }
- try {
- return Long.parseLong(name);
- } catch (NumberFormatException e) {
- return null;
- }
- }
-
- private void createNewFile(String path) throws IOException {
- new File(path).createNewFile();
- }
-
- private void mkdirs(String path) throws IOException {
- new File(path).mkdirs();
- }
-
- private List<String> listDir(String dir) throws IOException {
- List<String> ret = new ArrayList<String>();
- File[] contents = new File(dir).listFiles();
- if (contents != null) {
- for (File f : contents) {
- ret.add(f.getAbsolutePath());
- }
- }
- return ret;
- }
-}
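
VersionedStore above implements a small versioned-directory protocol: a writer reserves a fresh version path, fills it, and marks it finished by creating a ".version" token file; readers only trust versions whose token exists. A hedged usage sketch of that lifecycle (the store path and file contents are made up):

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;

import backtype.storm.utils.VersionedStore;

public class VersionedStoreDemo {
    public static void main(String[] args) throws IOException {
        // "/tmp/demo-store" is an arbitrary example location.
        VersionedStore store = new VersionedStore("/tmp/demo-store");

        String path = store.createVersion();    // reserve a new version path
        new File(path).mkdirs();                 // the caller creates and fills it
        FileWriter writer = new FileWriter(new File(path, "data.txt"));
        writer.write("hello");
        writer.close();

        store.succeedVersion(path);              // drop the ".version" token file

        // Readers only see versions that were marked finished.
        System.out.println(store.mostRecentVersionPath());

        store.cleanup(2);                        // keep at most the two newest finished versions
    }
}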
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/WindowedTimeThrottler.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/WindowedTimeThrottler.java b/jstorm-client/src/main/java/backtype/storm/utils/WindowedTimeThrottler.java
deleted file mode 100644
index 5d43884..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/WindowedTimeThrottler.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package backtype.storm.utils;
-
-public class WindowedTimeThrottler {
- long _windowMillis;
- int _maxAmt;
- long _windowStartTime;
- int _windowEvents = 0;
-
- public WindowedTimeThrottler(Number windowMillis, Number maxAmt) {
- _windowMillis = windowMillis.longValue();
- _maxAmt = maxAmt.intValue();
- _windowStartTime = System.currentTimeMillis();
- }
-
- public boolean isThrottled() {
- resetIfNecessary();
- return _windowEvents >= _maxAmt;
- }
-
-    // Records one event against the current window. Callers are expected to
-    // check isThrottled() first; this method itself returns nothing.
- public void markEvent() {
- resetIfNecessary();
- _windowEvents++;
-
- }
-
- private void resetIfNecessary() {
- long now = System.currentTimeMillis();
- if (now - _windowStartTime >= _windowMillis) {
- _windowStartTime = now;
- _windowEvents = 0;
- }
- }
-}
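
WindowedTimeThrottler above caps how many events may pass in each fixed-length window; the intended pattern is to test isThrottled() and, when allowed, record the event with markEvent(). A minimal sketch with arbitrary demo limits (at most 5 events per second):

import backtype.storm.utils.WindowedTimeThrottler;

public class ThrottlerDemo {
    public static void main(String[] args) throws InterruptedException {
        // Allow at most 5 events per 1000 ms window (values chosen for the demo).
        WindowedTimeThrottler throttler = new WindowedTimeThrottler(1000, 5);
        for (int i = 0; i < 20; i++) {
            if (!throttler.isThrottled()) {
                throttler.markEvent();           // count this event against the window
                System.out.println("sent event " + i);
            } else {
                System.out.println("throttled event " + i);
            }
            Thread.sleep(50);
        }
    }
}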
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/WorkerClassLoader.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/WorkerClassLoader.java b/jstorm-client/src/main/java/backtype/storm/utils/WorkerClassLoader.java
deleted file mode 100644
index a0c7073..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/WorkerClassLoader.java
+++ /dev/null
@@ -1,156 +0,0 @@
-package backtype.storm.utils;
-
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.log4j.Logger;
-
-public class WorkerClassLoader extends URLClassLoader {
-
- public static Logger LOG = Logger.getLogger(WorkerClassLoader.class);
-
- private ClassLoader defaultClassLoader;
-
- private ClassLoader JDKClassLoader;
-
- private boolean isDebug;
-
- protected static WorkerClassLoader instance;
-
- protected static boolean enable;
-
- protected static Map<Thread, ClassLoader> threadContextCache;
-
- protected WorkerClassLoader(URL[] urls, ClassLoader defaultClassLoader,
- ClassLoader JDKClassLoader, boolean isDebug) {
- super(urls, JDKClassLoader);
- this.defaultClassLoader = defaultClassLoader;
- this.JDKClassLoader = JDKClassLoader;
- this.isDebug = isDebug;
-
- // TODO Auto-generated constructor stub
- }
-
- @Override
- public Class<?> loadClass(String name) throws ClassNotFoundException {
- Class<?> result = null;
- try {
- result = this.findLoadedClass(name);
-
- if (result != null) {
- return result;
- }
-
- try {
- result = JDKClassLoader.loadClass(name);
- if (result != null)
- return result;
- } catch (Exception e) {
-
- }
-
- try {
- if (name.startsWith("org.apache.log4j") == false
- && name.startsWith("backtype.storm") == false
- && name.startsWith("com.alibaba.jstorm") == false) {
- result = findClass(name);
-
- if (result != null) {
- return result;
- }
- }
-
- } catch (Exception e) {
-
- }
-
- result = defaultClassLoader.loadClass(name);
- return result;
-
- } finally {
- if (result != null) {
- ClassLoader resultClassLoader = result.getClassLoader();
-                LOG.info("Successfully loaded class " + name + " by "
- + resultClassLoader + ",threadContextLoader:"
- + Thread.currentThread().getContextClassLoader());
- } else {
- LOG.warn("Failed to load class " + name
- + ",threadContextLoader:"
- + Thread.currentThread().getContextClassLoader());
- }
-
- if (isDebug) {
- LOG.info(Utils.printStack());
- }
- }
-
- }
-
- public static WorkerClassLoader mkInstance(URL[] urls,
- ClassLoader DefaultClassLoader, ClassLoader JDKClassLoader,
- boolean enable, boolean isDebug) {
- WorkerClassLoader.enable = enable;
- if (enable == false) {
-            LOG.info("User-defined classloader is not enabled");
- return null;
- }
-
- synchronized (WorkerClassLoader.class) {
- if (instance == null) {
- instance = new WorkerClassLoader(urls, DefaultClassLoader,
- JDKClassLoader, isDebug);
-
- threadContextCache = new ConcurrentHashMap<Thread, ClassLoader>();
- }
-
- }
-
-        LOG.info("Successfully created classloader " + mk_list(urls));
- return instance;
- }
-
- public static WorkerClassLoader getInstance() {
- return instance;
- }
-
- public static boolean isEnable() {
- return enable;
- }
-
- public static void switchThreadContext() {
- if (enable == false) {
- return;
- }
-
- Thread thread = Thread.currentThread();
- ClassLoader oldClassLoader = thread.getContextClassLoader();
- threadContextCache.put(thread, oldClassLoader);
- thread.setContextClassLoader(instance);
- }
-
- public static void restoreThreadContext() {
- if (enable == false) {
- return;
- }
-
- Thread thread = Thread.currentThread();
- ClassLoader oldClassLoader = threadContextCache.get(thread);
- if (oldClassLoader != null) {
- thread.setContextClassLoader(oldClassLoader);
- } else {
- LOG.info("No context classloader of " + thread.getName());
- }
- }
-
- private static <V> List<V> mk_list(V... args) {
- ArrayList<V> rtn = new ArrayList<V>();
- for (V o : args) {
- rtn.add(o);
- }
- return rtn;
- }
-}
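
The static API of WorkerClassLoader above suggests the intended calling pattern: build the singleton once from the topology's jar URLs, then bracket user code with switchThreadContext()/restoreThreadContext() so the thread's context loader points at it only while user code runs. A rough sketch under that assumption (the jar URL is a placeholder):

import java.net.URL;

import backtype.storm.utils.WorkerClassLoader;

public class WorkerClassLoaderDemo {
    public static void main(String[] args) throws Exception {
        URL[] jars = { new URL("file:///tmp/topology-code.jar") };   // placeholder jar
        WorkerClassLoader loader = WorkerClassLoader.mkInstance(
                jars,
                WorkerClassLoaderDemo.class.getClassLoader(),        // fallback loader
                ClassLoader.getSystemClassLoader().getParent(),      // JDK-only parent
                true,                                                // enable the classloader
                false);                                              // no debug stack dumps

        WorkerClassLoader.switchThreadContext();
        try {
            // JDK classes still resolve through the JDK loader first.
            System.out.println(loader.loadClass("java.util.ArrayList"));
        } finally {
            WorkerClassLoader.restoreThreadContext();
        }
    }
}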
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/WritableUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/WritableUtils.java b/jstorm-client/src/main/java/backtype/storm/utils/WritableUtils.java
deleted file mode 100644
index 9a44a5b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/WritableUtils.java
+++ /dev/null
@@ -1,377 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * This file originally comes from the Apache Hadoop project. Changes have been made to the file.
- *
- */
-
-package backtype.storm.utils;
-
-import java.io.*;
-
-import java.util.zip.GZIPInputStream;
-import java.util.zip.GZIPOutputStream;
-
-public final class WritableUtils {
-
- public static byte[] readCompressedByteArray(DataInput in)
- throws IOException {
- int length = in.readInt();
- if (length == -1)
- return null;
- byte[] buffer = new byte[length];
- in.readFully(buffer); // could/should use readFully(buffer,0,length)?
- GZIPInputStream gzi = new GZIPInputStream(new ByteArrayInputStream(
- buffer, 0, buffer.length));
- byte[] outbuf = new byte[length];
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- int len;
- while ((len = gzi.read(outbuf, 0, outbuf.length)) != -1) {
- bos.write(outbuf, 0, len);
- }
- byte[] decompressed = bos.toByteArray();
- bos.close();
- gzi.close();
- return decompressed;
- }
-
- public static void skipCompressedByteArray(DataInput in) throws IOException {
- int length = in.readInt();
- if (length != -1) {
- skipFully(in, length);
- }
- }
-
- public static int writeCompressedByteArray(DataOutput out, byte[] bytes)
- throws IOException {
- if (bytes != null) {
- ByteArrayOutputStream bos = new ByteArrayOutputStream();
- GZIPOutputStream gzout = new GZIPOutputStream(bos);
- gzout.write(bytes, 0, bytes.length);
- gzout.close();
- byte[] buffer = bos.toByteArray();
- int len = buffer.length;
- out.writeInt(len);
- out.write(buffer, 0, len);
- /* debug only! Once we have confidence, can lose this. */
- return ((bytes.length != 0) ? (100 * buffer.length) / bytes.length
- : 0);
- } else {
- out.writeInt(-1);
- return -1;
- }
- }
-
- /* Ugly utility, maybe someone else can do this better */
- public static String readCompressedString(DataInput in) throws IOException {
- byte[] bytes = readCompressedByteArray(in);
- if (bytes == null)
- return null;
- return new String(bytes, "UTF-8");
- }
-
- public static int writeCompressedString(DataOutput out, String s)
- throws IOException {
- return writeCompressedByteArray(out, (s != null) ? s.getBytes("UTF-8")
- : null);
- }
-
-    /*
-     * Write a String as a network int n followed by n bytes, as an alternative
-     * to the 16-bit length-prefixed read/writeUTF. Strings are encoded as UTF-8.
-     */
- public static void writeString(DataOutput out, String s) throws IOException {
- if (s != null) {
- byte[] buffer = s.getBytes("UTF-8");
- int len = buffer.length;
- out.writeInt(len);
- out.write(buffer, 0, len);
- } else {
- out.writeInt(-1);
- }
- }
-
-    /*
-     * Read a String written as a network int n followed by n bytes, as an
-     * alternative to the 16-bit length-prefixed read/writeUTF. Strings are
-     * decoded as UTF-8.
-     */
- public static String readString(DataInput in) throws IOException {
- int length = in.readInt();
- if (length == -1)
- return null;
- byte[] buffer = new byte[length];
- in.readFully(buffer); // could/should use readFully(buffer,0,length)?
- return new String(buffer, "UTF-8");
- }
-
- /*
-     * Write a String array as a Network Int N, followed by Int N Byte Array
- * Strings. Could be generalised using introspection.
- */
- public static void writeStringArray(DataOutput out, String[] s)
- throws IOException {
- out.writeInt(s.length);
- for (int i = 0; i < s.length; i++) {
- writeString(out, s[i]);
- }
- }
-
- /*
-     * Write a String array as a Network Int N, followed by Int N Byte Array of
- * compressed Strings. Handles also null arrays and null values. Could be
- * generalised using introspection.
- */
- public static void writeCompressedStringArray(DataOutput out, String[] s)
- throws IOException {
- if (s == null) {
- out.writeInt(-1);
- return;
- }
- out.writeInt(s.length);
- for (int i = 0; i < s.length; i++) {
- writeCompressedString(out, s[i]);
- }
- }
-
- /*
-     * Read a String array written as a Network Int N, followed by Int N Byte
-     * Array Strings. Could be generalised using introspection. Actually this
-     * bit couldn't...
- */
- public static String[] readStringArray(DataInput in) throws IOException {
- int len = in.readInt();
- if (len == -1)
- return null;
- String[] s = new String[len];
- for (int i = 0; i < len; i++) {
- s[i] = readString(in);
- }
- return s;
- }
-
- /*
-     * Read a String array written as a Network Int N, followed by Int N Byte
-     * Array compressed Strings. Could be generalised using introspection.
-     * Handles null arrays and null values.
- */
- public static String[] readCompressedStringArray(DataInput in)
- throws IOException {
- int len = in.readInt();
- if (len == -1)
- return null;
- String[] s = new String[len];
- for (int i = 0; i < len; i++) {
- s[i] = readCompressedString(in);
- }
- return s;
- }
-
- /*
- *
- * Test Utility Method Display Byte Array.
- */
- public static void displayByteArray(byte[] record) {
- int i;
- for (i = 0; i < record.length - 1; i++) {
- if (i % 16 == 0) {
- System.out.println();
- }
- System.out.print(Integer.toHexString(record[i] >> 4 & 0x0F));
- System.out.print(Integer.toHexString(record[i] & 0x0F));
- System.out.print(",");
- }
- System.out.print(Integer.toHexString(record[i] >> 4 & 0x0F));
- System.out.print(Integer.toHexString(record[i] & 0x0F));
- System.out.println();
- }
-
- /**
- * Serializes an integer to a binary stream with zero-compressed encoding.
-     * Because writeVInt simply delegates to writeVLong, the encoding is the
-     * same: for -112 <= i <= 127, only one byte is used with the actual value.
-     * For other values of i, the first byte value indicates whether the integer
-     * is positive or negative and how many bytes follow, exactly as described
-     * for writeVLong below. Bytes are stored in the
- * high-non-zero-byte-first order.
- *
- * @param stream
- * Binary output stream
- * @param i
- * Integer to be serialized
- * @throws java.io.IOException
- */
- public static void writeVInt(DataOutput stream, int i) throws IOException {
- writeVLong(stream, i);
- }
-
- /**
- * Serializes a long to a binary stream with zero-compressed encoding. For
- * -112 <= i <= 127, only one byte is used with the actual value. For other
- * values of i, the first byte value indicates whether the long is positive
- * or negative, and the number of bytes that follow. If the first byte value
- * v is between -113 and -120, the following long is positive, with number
- * of bytes that follow are -(v+112). If the first byte value v is between
- * -121 and -128, the following long is negative, with number of bytes that
- * follow are -(v+120). Bytes are stored in the high-non-zero-byte-first
- * order.
- *
- * @param stream
- * Binary output stream
- * @param i
- * Long to be serialized
- * @throws java.io.IOException
- */
- public static void writeVLong(DataOutput stream, long i) throws IOException {
- if (i >= -112 && i <= 127) {
- stream.writeByte((byte) i);
- return;
- }
-
- int len = -112;
- if (i < 0) {
-            i ^= -1L; // take one's complement
- len = -120;
- }
-
- long tmp = i;
- while (tmp != 0) {
- tmp = tmp >> 8;
- len--;
- }
-
- stream.writeByte((byte) len);
-
- len = (len < -120) ? -(len + 120) : -(len + 112);
-
- for (int idx = len; idx != 0; idx--) {
- int shiftbits = (idx - 1) * 8;
- long mask = 0xFFL << shiftbits;
- stream.writeByte((byte) ((i & mask) >> shiftbits));
- }
- }
-
- /**
- * Reads a zero-compressed encoded long from input stream and returns it.
- *
- * @param stream
- * Binary input stream
- * @throws java.io.IOException
- * @return deserialized long from stream.
- */
- public static long readVLong(DataInput stream) throws IOException {
- byte firstByte = stream.readByte();
- int len = decodeVIntSize(firstByte);
- if (len == 1) {
- return firstByte;
- }
- long i = 0;
- for (int idx = 0; idx < len - 1; idx++) {
- byte b = stream.readByte();
- i = i << 8;
- i = i | (b & 0xFF);
- }
- return (isNegativeVInt(firstByte) ? (i ^ -1L) : i);
- }
-
- /**
- * Reads a zero-compressed encoded integer from input stream and returns it.
- *
- * @param stream
- * Binary input stream
- * @throws java.io.IOException
- * @return deserialized integer from stream.
- */
- public static int readVInt(DataInput stream) throws IOException {
- return (int) readVLong(stream);
- }
-
- /**
- * Given the first byte of a vint/vlong, determine the sign
- *
- * @param value
- * the first byte
- * @return is the value negative
- */
- public static boolean isNegativeVInt(byte value) {
- return value < -120 || (value >= -112 && value < 0);
- }
-
- /**
- * Parse the first byte of a vint/vlong to determine the number of bytes
- *
- * @param value
- * the first byte of the vint/vlong
- * @return the total number of bytes (1 to 9)
- */
- public static int decodeVIntSize(byte value) {
- if (value >= -112) {
- return 1;
- } else if (value < -120) {
- return -119 - value;
- }
- return -111 - value;
- }
-
- /**
- * Get the encoded length if an integer is stored in a variable-length
- * format
- *
- * @return the encoded length
- */
- public static int getVIntSize(long i) {
- if (i >= -112 && i <= 127) {
- return 1;
- }
-
- if (i < 0) {
-            i ^= -1L; // take one's complement
- }
- // find the number of bytes with non-leading zeros
- int dataBits = Long.SIZE - Long.numberOfLeadingZeros(i);
- // find the number of data bytes + length byte
- return (dataBits + 7) / 8 + 1;
- }
-
- /**
-     * Skip <i>len</i> bytes in the input stream <i>in</i>.
- *
- * @param in
- * input stream
- * @param len
- * number of bytes to skip
- * @throws IOException
-     *             when fewer than <i>len</i> bytes could be skipped
- */
- public static void skipFully(DataInput in, int len) throws IOException {
- int total = 0;
- int cur = 0;
-
- while ((total < len) && ((cur = in.skipBytes(len - total)) > 0)) {
- total += cur;
- }
-
- if (total < len) {
- throw new IOException("Not able to skip " + len
- + " bytes, possibly " + "due to end of input.");
- }
- }
-}
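
The zero-compressed VInt/VLong encoding documented above is easiest to see in a round trip: values in [-112, 127] are written as a single byte, while a value such as 300 becomes a length-marker byte (-114) followed by the payload bytes 0x01 0x2C. A self-contained sketch using the helpers removed above:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;

import backtype.storm.utils.WritableUtils;

public class VLongDemo {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bos);

        WritableUtils.writeVLong(out, 300L);    // marker byte plus two payload bytes
        WritableUtils.writeVLong(out, 42L);     // fits directly in one byte

        byte[] encoded = bos.toByteArray();
        System.out.println(Arrays.toString(encoded));    // [-114, 1, 44, 42]

        DataInputStream in = new DataInputStream(new ByteArrayInputStream(encoded));
        System.out.println(WritableUtils.readVLong(in)); // 300
        System.out.println(WritableUtils.readVLong(in)); // 42
    }
}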
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ZookeeperAuthInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ZookeeperAuthInfo.java b/jstorm-client/src/main/java/backtype/storm/utils/ZookeeperAuthInfo.java
deleted file mode 100644
index 4c6df4a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ZookeeperAuthInfo.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package backtype.storm.utils;
-
-import backtype.storm.Config;
-import java.io.UnsupportedEncodingException;
-import java.util.Map;
-
-public class ZookeeperAuthInfo {
- public String scheme;
- public byte[] payload = null;
-
- public ZookeeperAuthInfo(Map conf) {
- String scheme = (String) conf.get(Config.STORM_ZOOKEEPER_AUTH_SCHEME);
- String payload = (String) conf.get(Config.STORM_ZOOKEEPER_AUTH_PAYLOAD);
- if (scheme != null) {
- this.scheme = scheme;
- if (payload != null) {
- try {
- this.payload = payload.getBytes("UTF-8");
- } catch (UnsupportedEncodingException ex) {
- throw new RuntimeException(ex);
- }
- }
- }
- }
-
- public ZookeeperAuthInfo(String scheme, byte[] payload) {
- this.scheme = scheme;
- this.payload = payload;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/disruptor/AbstractSequencerExt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/AbstractSequencerExt.java b/jstorm-client/src/main/java/backtype/storm/utils/disruptor/AbstractSequencerExt.java
deleted file mode 100644
index 3c4f1f9..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/disruptor/AbstractSequencerExt.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package backtype.storm.utils.disruptor;
-
-import com.lmax.disruptor.AbstractSequencer;
-import com.lmax.disruptor.WaitStrategy;
-
-public abstract class AbstractSequencerExt extends AbstractSequencer {
- private static boolean waitSleep = true;
-
- public static boolean isWaitSleep() {
- return waitSleep;
- }
-
- public static void setWaitSleep(boolean waitSleep) {
- AbstractSequencerExt.waitSleep = waitSleep;
- }
-
- public AbstractSequencerExt(int bufferSize, WaitStrategy waitStrategy) {
- super(bufferSize, waitStrategy);
- }
-
-}
[16/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/DistributedRPCInvocations.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/DistributedRPCInvocations.py b/jstorm-client/src/main/py/storm/DistributedRPCInvocations.py
deleted file mode 100644
index 4f951a9..0000000
--- a/jstorm-client/src/main/py/storm/DistributedRPCInvocations.py
+++ /dev/null
@@ -1,549 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-from thrift.Thrift import *
-from ttypes import *
-from thrift.Thrift import TProcessor
-from thrift.transport import TTransport
-from thrift.protocol import TBinaryProtocol, TProtocol
-try:
- from thrift.protocol import fastbinary
-except:
- fastbinary = None
-
-
-class Iface:
- def result(self, id, result):
- """
- Parameters:
- - id
- - result
- """
- pass
-
- def fetchRequest(self, functionName):
- """
- Parameters:
- - functionName
- """
- pass
-
- def failRequest(self, id):
- """
- Parameters:
- - id
- """
- pass
-
-
-class Client(Iface):
- def __init__(self, iprot, oprot=None):
- self._iprot = self._oprot = iprot
- if oprot is not None:
- self._oprot = oprot
- self._seqid = 0
-
- def result(self, id, result):
- """
- Parameters:
- - id
- - result
- """
- self.send_result(id, result)
- self.recv_result()
-
- def send_result(self, id, result):
- self._oprot.writeMessageBegin('result', TMessageType.CALL, self._seqid)
- args = result_args()
- args.id = id
- args.result = result
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_result(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = result_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- return
-
- def fetchRequest(self, functionName):
- """
- Parameters:
- - functionName
- """
- self.send_fetchRequest(functionName)
- return self.recv_fetchRequest()
-
- def send_fetchRequest(self, functionName):
- self._oprot.writeMessageBegin('fetchRequest', TMessageType.CALL, self._seqid)
- args = fetchRequest_args()
- args.functionName = functionName
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_fetchRequest(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = fetchRequest_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- raise TApplicationException(TApplicationException.MISSING_RESULT, "fetchRequest failed: unknown result");
-
- def failRequest(self, id):
- """
- Parameters:
- - id
- """
- self.send_failRequest(id)
- self.recv_failRequest()
-
- def send_failRequest(self, id):
- self._oprot.writeMessageBegin('failRequest', TMessageType.CALL, self._seqid)
- args = failRequest_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_failRequest(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = failRequest_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- return
-
-
-class Processor(Iface, TProcessor):
- def __init__(self, handler):
- self._handler = handler
- self._processMap = {}
- self._processMap["result"] = Processor.process_result
- self._processMap["fetchRequest"] = Processor.process_fetchRequest
- self._processMap["failRequest"] = Processor.process_failRequest
-
- def process(self, iprot, oprot):
- (name, type, seqid) = iprot.readMessageBegin()
- if name not in self._processMap:
- iprot.skip(TType.STRUCT)
- iprot.readMessageEnd()
- x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
- oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
- x.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
- return
- else:
- self._processMap[name](self, seqid, iprot, oprot)
- return True
-
- def process_result(self, seqid, iprot, oprot):
- args = result_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = result_result()
- self._handler.result(args.id, args.result)
- oprot.writeMessageBegin("result", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_fetchRequest(self, seqid, iprot, oprot):
- args = fetchRequest_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = fetchRequest_result()
- result.success = self._handler.fetchRequest(args.functionName)
- oprot.writeMessageBegin("fetchRequest", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_failRequest(self, seqid, iprot, oprot):
- args = failRequest_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = failRequest_result()
- self._handler.failRequest(args.id)
- oprot.writeMessageBegin("failRequest", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
-
-# HELPER FUNCTIONS AND STRUCTURES
-
-class result_args:
- """
- Attributes:
- - id
- - result
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'id', None, None, ), # 1
- (2, TType.STRING, 'result', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.id) + hash(self.result)
-
- def __init__(self, id=None, result=None,):
- self.id = id
- self.result = result
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.id = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.result = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('result_args')
- if self.id is not None:
- oprot.writeFieldBegin('id', TType.STRING, 1)
- oprot.writeString(self.id.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.result is not None:
- oprot.writeFieldBegin('result', TType.STRING, 2)
- oprot.writeString(self.result.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class result_result:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('result_result')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class fetchRequest_args:
- """
- Attributes:
- - functionName
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'functionName', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.functionName)
-
- def __init__(self, functionName=None,):
- self.functionName = functionName
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.functionName = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('fetchRequest_args')
- if self.functionName is not None:
- oprot.writeFieldBegin('functionName', TType.STRING, 1)
- oprot.writeString(self.functionName.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class fetchRequest_result:
- """
- Attributes:
- - success
- """
-
- thrift_spec = (
- (0, TType.STRUCT, 'success', (DRPCRequest, DRPCRequest.thrift_spec), None, ), # 0
- )
-
- def __hash__(self):
- return 0 + hash(self.success)
-
- def __init__(self, success=None,):
- self.success = success
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 0:
- if ftype == TType.STRUCT:
- self.success = DRPCRequest()
- self.success.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('fetchRequest_result')
- if self.success is not None:
- oprot.writeFieldBegin('success', TType.STRUCT, 0)
- self.success.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class failRequest_args:
- """
- Attributes:
- - id
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'id', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.id)
-
- def __init__(self, id=None,):
- self.id = id
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.id = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('failRequest_args')
- if self.id is not None:
- oprot.writeFieldBegin('id', TType.STRING, 1)
- oprot.writeString(self.id.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class failRequest_result:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('failRequest_result')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/Nimbus-remote
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/Nimbus-remote b/jstorm-client/src/main/py/storm/Nimbus-remote
deleted file mode 100644
index 1fe7051..0000000
--- a/jstorm-client/src/main/py/storm/Nimbus-remote
+++ /dev/null
@@ -1,232 +0,0 @@
-#!/usr/bin/env python
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-import sys
-import pprint
-from urlparse import urlparse
-from thrift.transport import TTransport
-from thrift.transport import TSocket
-from thrift.transport import THttpClient
-from thrift.protocol import TBinaryProtocol
-
-import Nimbus
-from ttypes import *
-
-if len(sys.argv) <= 1 or sys.argv[1] == '--help':
- print ''
- print 'Usage: ' + sys.argv[0] + ' [-h host:port] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
- print ''
- print 'Functions:'
- print ' void submitTopology(string name, string uploadedJarLocation, string jsonConf, StormTopology topology)'
- print ' void submitTopologyWithOpts(string name, string uploadedJarLocation, string jsonConf, StormTopology topology, SubmitOptions options)'
- print ' void killTopology(string name)'
- print ' void killTopologyWithOpts(string name, KillOptions options)'
- print ' void activate(string name)'
- print ' void deactivate(string name)'
- print ' void rebalance(string name, RebalanceOptions options)'
- print ' void metricMonitor(string name, MonitorOptions options)'
- print ' void beginLibUpload(string libName)'
- print ' string beginFileUpload()'
- print ' void uploadChunk(string location, string chunk)'
- print ' void finishFileUpload(string location)'
- print ' string beginFileDownload(string file)'
- print ' string downloadChunk(string id)'
- print ' string getNimbusConf()'
- print ' ClusterSummary getClusterInfo()'
- print ' TopologyInfo getTopologyInfo(string id)'
- print ' SupervisorWorkers getSupervisorWorkers(string host)'
- print ' string getTopologyConf(string id)'
- print ' StormTopology getTopology(string id)'
- print ' StormTopology getUserTopology(string id)'
- print ' TopologyMetricInfo getTopologyMetric(string id)'
- print ''
- sys.exit(0)
-
-pp = pprint.PrettyPrinter(indent = 2)
-host = 'localhost'
-port = 9090
-uri = ''
-framed = False
-http = False
-argi = 1
-
-if sys.argv[argi] == '-h':
- parts = sys.argv[argi+1].split(':')
- host = parts[0]
- port = int(parts[1])
- argi += 2
-
-if sys.argv[argi] == '-u':
- url = urlparse(sys.argv[argi+1])
- parts = url[1].split(':')
- host = parts[0]
- if len(parts) > 1:
- port = int(parts[1])
- else:
- port = 80
- uri = url[2]
- if url[4]:
- uri += '?%s' % url[4]
- http = True
- argi += 2
-
-if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
- framed = True
- argi += 1
-
-cmd = sys.argv[argi]
-args = sys.argv[argi+1:]
-
-if http:
- transport = THttpClient.THttpClient(host, port, uri)
-else:
- socket = TSocket.TSocket(host, port)
- if framed:
- transport = TTransport.TFramedTransport(socket)
- else:
- transport = TTransport.TBufferedTransport(socket)
-protocol = TBinaryProtocol.TBinaryProtocol(transport)
-client = Nimbus.Client(protocol)
-transport.open()
-
-if cmd == 'submitTopology':
- if len(args) != 4:
- print 'submitTopology requires 4 args'
- sys.exit(1)
- pp.pprint(client.submitTopology(args[0],args[1],args[2],eval(args[3]),))
-
-elif cmd == 'submitTopologyWithOpts':
- if len(args) != 5:
- print 'submitTopologyWithOpts requires 5 args'
- sys.exit(1)
- pp.pprint(client.submitTopologyWithOpts(args[0],args[1],args[2],eval(args[3]),eval(args[4]),))
-
-elif cmd == 'killTopology':
- if len(args) != 1:
- print 'killTopology requires 1 args'
- sys.exit(1)
- pp.pprint(client.killTopology(args[0],))
-
-elif cmd == 'killTopologyWithOpts':
- if len(args) != 2:
- print 'killTopologyWithOpts requires 2 args'
- sys.exit(1)
- pp.pprint(client.killTopologyWithOpts(args[0],eval(args[1]),))
-
-elif cmd == 'activate':
- if len(args) != 1:
- print 'activate requires 1 args'
- sys.exit(1)
- pp.pprint(client.activate(args[0],))
-
-elif cmd == 'deactivate':
- if len(args) != 1:
- print 'deactivate requires 1 args'
- sys.exit(1)
- pp.pprint(client.deactivate(args[0],))
-
-elif cmd == 'rebalance':
- if len(args) != 2:
- print 'rebalance requires 2 args'
- sys.exit(1)
- pp.pprint(client.rebalance(args[0],eval(args[1]),))
-
-elif cmd == 'metricMonitor':
- if len(args) != 2:
- print 'metricMonitor requires 2 args'
- sys.exit(1)
- pp.pprint(client.metricMonitor(args[0],eval(args[1]),))
-
-elif cmd == 'beginLibUpload':
- if len(args) != 1:
- print 'beginLibUpload requires 1 args'
- sys.exit(1)
- pp.pprint(client.beginLibUpload(args[0],))
-
-elif cmd == 'beginFileUpload':
- if len(args) != 0:
- print 'beginFileUpload requires 0 args'
- sys.exit(1)
- pp.pprint(client.beginFileUpload())
-
-elif cmd == 'uploadChunk':
- if len(args) != 2:
- print 'uploadChunk requires 2 args'
- sys.exit(1)
- pp.pprint(client.uploadChunk(args[0],args[1],))
-
-elif cmd == 'finishFileUpload':
- if len(args) != 1:
- print 'finishFileUpload requires 1 args'
- sys.exit(1)
- pp.pprint(client.finishFileUpload(args[0],))
-
-elif cmd == 'beginFileDownload':
- if len(args) != 1:
- print 'beginFileDownload requires 1 args'
- sys.exit(1)
- pp.pprint(client.beginFileDownload(args[0],))
-
-elif cmd == 'downloadChunk':
- if len(args) != 1:
- print 'downloadChunk requires 1 args'
- sys.exit(1)
- pp.pprint(client.downloadChunk(args[0],))
-
-elif cmd == 'getNimbusConf':
- if len(args) != 0:
- print 'getNimbusConf requires 0 args'
- sys.exit(1)
- pp.pprint(client.getNimbusConf())
-
-elif cmd == 'getClusterInfo':
- if len(args) != 0:
- print 'getClusterInfo requires 0 args'
- sys.exit(1)
- pp.pprint(client.getClusterInfo())
-
-elif cmd == 'getTopologyInfo':
- if len(args) != 1:
- print 'getTopologyInfo requires 1 args'
- sys.exit(1)
- pp.pprint(client.getTopologyInfo(args[0],))
-
-elif cmd == 'getSupervisorWorkers':
- if len(args) != 1:
- print 'getSupervisorWorkers requires 1 args'
- sys.exit(1)
- pp.pprint(client.getSupervisorWorkers(args[0],))
-
-elif cmd == 'getTopologyConf':
- if len(args) != 1:
- print 'getTopologyConf requires 1 args'
- sys.exit(1)
- pp.pprint(client.getTopologyConf(args[0],))
-
-elif cmd == 'getTopology':
- if len(args) != 1:
- print 'getTopology requires 1 args'
- sys.exit(1)
- pp.pprint(client.getTopology(args[0],))
-
-elif cmd == 'getUserTopology':
- if len(args) != 1:
- print 'getUserTopology requires 1 args'
- sys.exit(1)
- pp.pprint(client.getUserTopology(args[0],))
-
-elif cmd == 'getTopologyMetric':
- if len(args) != 1:
- print 'getTopologyMetric requires 1 args'
- sys.exit(1)
- pp.pprint(client.getTopologyMetric(args[0],))
-
-else:
- print 'Unrecognized method %s' % cmd
- sys.exit(1)
-
-transport.close()
[04/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java b/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java
new file mode 100644
index 0000000..08aab63
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/DistributedRPCInvocations.java
@@ -0,0 +1,2545 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class DistributedRPCInvocations {
+
+ public interface Iface {
+
+ public void result(String id, String result) throws org.apache.thrift.TException;
+
+ public DRPCRequest fetchRequest(String functionName) throws org.apache.thrift.TException;
+
+ public void failRequest(String id) throws org.apache.thrift.TException;
+
+ }
+
+ public interface AsyncIface {
+
+ public void result(String id, String result, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void fetchRequest(String functionName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ public void failRequest(String id, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException;
+
+ }
+
+ public static class Client extends org.apache.thrift.TServiceClient implements Iface {
+ public static class Factory implements org.apache.thrift.TServiceClientFactory<Client> {
+ public Factory() {}
+ public Client getClient(org.apache.thrift.protocol.TProtocol prot) {
+ return new Client(prot);
+ }
+ public Client getClient(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+ return new Client(iprot, oprot);
+ }
+ }
+
+ public Client(org.apache.thrift.protocol.TProtocol prot)
+ {
+ super(prot, prot);
+ }
+
+ public Client(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TProtocol oprot) {
+ super(iprot, oprot);
+ }
+
+ public void result(String id, String result) throws org.apache.thrift.TException
+ {
+ send_result(id, result);
+ recv_result();
+ }
+
+ public void send_result(String id, String result) throws org.apache.thrift.TException
+ {
+ result_args args = new result_args();
+ args.set_id(id);
+ args.set_result(result);
+ sendBase("result", args);
+ }
+
+ public void recv_result() throws org.apache.thrift.TException
+ {
+ result_result result = new result_result();
+ receiveBase(result, "result");
+ return;
+ }
+
+ public DRPCRequest fetchRequest(String functionName) throws org.apache.thrift.TException
+ {
+ send_fetchRequest(functionName);
+ return recv_fetchRequest();
+ }
+
+ public void send_fetchRequest(String functionName) throws org.apache.thrift.TException
+ {
+ fetchRequest_args args = new fetchRequest_args();
+ args.set_functionName(functionName);
+ sendBase("fetchRequest", args);
+ }
+
+ public DRPCRequest recv_fetchRequest() throws org.apache.thrift.TException
+ {
+ fetchRequest_result result = new fetchRequest_result();
+ receiveBase(result, "fetchRequest");
+ if (result.is_set_success()) {
+ return result.success;
+ }
+ throw new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.MISSING_RESULT, "fetchRequest failed: unknown result");
+ }
+
+ public void failRequest(String id) throws org.apache.thrift.TException
+ {
+ send_failRequest(id);
+ recv_failRequest();
+ }
+
+ public void send_failRequest(String id) throws org.apache.thrift.TException
+ {
+ failRequest_args args = new failRequest_args();
+ args.set_id(id);
+ sendBase("failRequest", args);
+ }
+
+ public void recv_failRequest() throws org.apache.thrift.TException
+ {
+ failRequest_result result = new failRequest_result();
+ receiveBase(result, "failRequest");
+ return;
+ }
+
+ }
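
The generated synchronous Client above follows the standard Thrift pattern: open a transport, wrap it in a protocol, and call the service methods directly. A hedged sketch of a DRPC invocations worker; the host, port and function name are placeholders, and the DRPCRequest accessors (get_func_args/get_request_id) are assumed from the generated struct rather than shown in this hunk:

import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;

import backtype.storm.generated.DRPCRequest;
import backtype.storm.generated.DistributedRPCInvocations;

public class DrpcInvocationsDemo {
    public static void main(String[] args) throws Exception {
        // "drpc-host" and 3773 are placeholders for the DRPC invocations endpoint.
        TTransport transport = new TFramedTransport(new TSocket("drpc-host", 3773));
        transport.open();
        DistributedRPCInvocations.Client client =
                new DistributedRPCInvocations.Client(new TBinaryProtocol(transport));

        // Pull one pending request for the function and answer it.
        DRPCRequest request = client.fetchRequest("my-function");   // placeholder name
        String answer = "result for " + request.get_func_args();    // accessor assumed
        client.result(request.get_request_id(), answer);            // accessor assumed

        transport.close();
    }
}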
+ public static class AsyncClient extends org.apache.thrift.async.TAsyncClient implements AsyncIface {
+ public static class Factory implements org.apache.thrift.async.TAsyncClientFactory<AsyncClient> {
+ private org.apache.thrift.async.TAsyncClientManager clientManager;
+ private org.apache.thrift.protocol.TProtocolFactory protocolFactory;
+ public Factory(org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.protocol.TProtocolFactory protocolFactory) {
+ this.clientManager = clientManager;
+ this.protocolFactory = protocolFactory;
+ }
+ public AsyncClient getAsyncClient(org.apache.thrift.transport.TNonblockingTransport transport) {
+ return new AsyncClient(protocolFactory, clientManager, transport);
+ }
+ }
+
+ public AsyncClient(org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.async.TAsyncClientManager clientManager, org.apache.thrift.transport.TNonblockingTransport transport) {
+ super(protocolFactory, clientManager, transport);
+ }
+
+ public void result(String id, String result, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ result_call method_call = new result_call(id, result, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class result_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String id;
+ private String result;
+ public result_call(String id, String result, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.id = id;
+ this.result = result;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("result", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ result_args args = new result_args();
+ args.set_id(id);
+ args.set_result(result);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_result();
+ }
+ }
+
+ public void fetchRequest(String functionName, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ fetchRequest_call method_call = new fetchRequest_call(functionName, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class fetchRequest_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String functionName;
+ public fetchRequest_call(String functionName, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.functionName = functionName;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("fetchRequest", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ fetchRequest_args args = new fetchRequest_args();
+ args.set_functionName(functionName);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public DRPCRequest getResult() throws org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ return (new Client(prot)).recv_fetchRequest();
+ }
+ }
+
+ public void failRequest(String id, org.apache.thrift.async.AsyncMethodCallback resultHandler) throws org.apache.thrift.TException {
+ checkReady();
+ failRequest_call method_call = new failRequest_call(id, resultHandler, this, ___protocolFactory, ___transport);
+ this.___currentMethod = method_call;
+ ___manager.call(method_call);
+ }
+
+ public static class failRequest_call extends org.apache.thrift.async.TAsyncMethodCall {
+ private String id;
+ public failRequest_call(String id, org.apache.thrift.async.AsyncMethodCallback resultHandler, org.apache.thrift.async.TAsyncClient client, org.apache.thrift.protocol.TProtocolFactory protocolFactory, org.apache.thrift.transport.TNonblockingTransport transport) throws org.apache.thrift.TException {
+ super(client, protocolFactory, transport, resultHandler, false);
+ this.id = id;
+ }
+
+ public void write_args(org.apache.thrift.protocol.TProtocol prot) throws org.apache.thrift.TException {
+ prot.writeMessageBegin(new org.apache.thrift.protocol.TMessage("failRequest", org.apache.thrift.protocol.TMessageType.CALL, 0));
+ failRequest_args args = new failRequest_args();
+ args.set_id(id);
+ args.write(prot);
+ prot.writeMessageEnd();
+ }
+
+ public void getResult() throws org.apache.thrift.TException {
+ if (getState() != org.apache.thrift.async.TAsyncMethodCall.State.RESPONSE_READ) {
+ throw new IllegalStateException("Method call not finished!");
+ }
+ org.apache.thrift.transport.TMemoryInputTransport memoryTransport = new org.apache.thrift.transport.TMemoryInputTransport(getFrameBuffer().array());
+ org.apache.thrift.protocol.TProtocol prot = client.getProtocolFactory().getProtocol(memoryTransport);
+ (new Client(prot)).recv_failRequest();
+ }
+ }
+
+ }
+
+ public static class Processor<I extends Iface> extends org.apache.thrift.TBaseProcessor<I> implements org.apache.thrift.TProcessor {
+ private static final Logger LOGGER = LoggerFactory.getLogger(Processor.class.getName());
+ public Processor(I iface) {
+ super(iface, getProcessMap(new HashMap<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>>()));
+ }
+
+ protected Processor(I iface, Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
+ super(iface, getProcessMap(processMap));
+ }
+
+ private static <I extends Iface> Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> getProcessMap(Map<String, org.apache.thrift.ProcessFunction<I, ? extends org.apache.thrift.TBase>> processMap) {
+ processMap.put("result", new result());
+ processMap.put("fetchRequest", new fetchRequest());
+ processMap.put("failRequest", new failRequest());
+ return processMap;
+ }
+
+ public static class result<I extends Iface> extends org.apache.thrift.ProcessFunction<I, result_args> {
+ public result() {
+ super("result");
+ }
+
+ public result_args getEmptyArgsInstance() {
+ return new result_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public result_result getResult(I iface, result_args args) throws org.apache.thrift.TException {
+ result_result result = new result_result();
+ iface.result(args.id, args.result);
+ return result;
+ }
+ }
+
+ public static class fetchRequest<I extends Iface> extends org.apache.thrift.ProcessFunction<I, fetchRequest_args> {
+ public fetchRequest() {
+ super("fetchRequest");
+ }
+
+ public fetchRequest_args getEmptyArgsInstance() {
+ return new fetchRequest_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public fetchRequest_result getResult(I iface, fetchRequest_args args) throws org.apache.thrift.TException {
+ fetchRequest_result result = new fetchRequest_result();
+ result.success = iface.fetchRequest(args.functionName);
+ return result;
+ }
+ }
+
+ public static class failRequest<I extends Iface> extends org.apache.thrift.ProcessFunction<I, failRequest_args> {
+ public failRequest() {
+ super("failRequest");
+ }
+
+ public failRequest_args getEmptyArgsInstance() {
+ return new failRequest_args();
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public failRequest_result getResult(I iface, failRequest_args args) throws org.apache.thrift.TException {
+ failRequest_result result = new failRequest_result();
+ iface.failRequest(args.id);
+ return result;
+ }
+ }
+
+ }
+
+ public static class AsyncProcessor<I extends AsyncIface> extends org.apache.thrift.TBaseAsyncProcessor<I> {
+ private static final Logger LOGGER = LoggerFactory.getLogger(AsyncProcessor.class.getName());
+ public AsyncProcessor(I iface) {
+ super(iface, getProcessMap(new HashMap<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>>()));
+ }
+
+ protected AsyncProcessor(I iface, Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
+ super(iface, getProcessMap(processMap));
+ }
+
+ private static <I extends AsyncIface> Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase,?>> getProcessMap(Map<String, org.apache.thrift.AsyncProcessFunction<I, ? extends org.apache.thrift.TBase, ?>> processMap) {
+ processMap.put("result", new result());
+ processMap.put("fetchRequest", new fetchRequest());
+ processMap.put("failRequest", new failRequest());
+ return processMap;
+ }
+
+ public static class result<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, result_args, Void> {
+ public result() {
+ super("result");
+ }
+
+ public result_args getEmptyArgsInstance() {
+ return new result_args();
+ }
+
+ public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<Void>() {
+ public void onComplete(Void o) {
+ result_result result = new result_result();
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ result_result result = new result_result();
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public void start(I iface, result_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+ iface.result(args.id, args.result,resultHandler);
+ }
+ }
+
+ public static class fetchRequest<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, fetchRequest_args, DRPCRequest> {
+ public fetchRequest() {
+ super("fetchRequest");
+ }
+
+ public fetchRequest_args getEmptyArgsInstance() {
+ return new fetchRequest_args();
+ }
+
+ public AsyncMethodCallback<DRPCRequest> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<DRPCRequest>() {
+ public void onComplete(DRPCRequest o) {
+ fetchRequest_result result = new fetchRequest_result();
+ result.success = o;
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ fetchRequest_result result = new fetchRequest_result();
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public void start(I iface, fetchRequest_args args, org.apache.thrift.async.AsyncMethodCallback<DRPCRequest> resultHandler) throws TException {
+ iface.fetchRequest(args.functionName,resultHandler);
+ }
+ }
+
+ public static class failRequest<I extends AsyncIface> extends org.apache.thrift.AsyncProcessFunction<I, failRequest_args, Void> {
+ public failRequest() {
+ super("failRequest");
+ }
+
+ public failRequest_args getEmptyArgsInstance() {
+ return new failRequest_args();
+ }
+
+ public AsyncMethodCallback<Void> getResultHandler(final AsyncFrameBuffer fb, final int seqid) {
+ final org.apache.thrift.AsyncProcessFunction fcall = this;
+ return new AsyncMethodCallback<Void>() {
+ public void onComplete(Void o) {
+ failRequest_result result = new failRequest_result();
+ try {
+ fcall.sendResponse(fb,result, org.apache.thrift.protocol.TMessageType.REPLY,seqid);
+ return;
+ } catch (Exception e) {
+ LOGGER.error("Exception writing to internal frame buffer", e);
+ }
+ fb.close();
+ }
+ public void onError(Exception e) {
+ byte msgType = org.apache.thrift.protocol.TMessageType.REPLY;
+ org.apache.thrift.TBase msg;
+ failRequest_result result = new failRequest_result();
+ {
+ msgType = org.apache.thrift.protocol.TMessageType.EXCEPTION;
+ msg = (org.apache.thrift.TBase)new org.apache.thrift.TApplicationException(org.apache.thrift.TApplicationException.INTERNAL_ERROR, e.getMessage());
+ }
+ try {
+ fcall.sendResponse(fb,msg,msgType,seqid);
+ return;
+ } catch (Exception ex) {
+ LOGGER.error("Exception writing to internal frame buffer", ex);
+ }
+ fb.close();
+ }
+ };
+ }
+
+ protected boolean isOneway() {
+ return false;
+ }
+
+ public void start(I iface, failRequest_args args, org.apache.thrift.async.AsyncMethodCallback<Void> resultHandler) throws TException {
+ iface.failRequest(args.id,resultHandler);
+ }
+ }
+
+ }
+
+ public static class result_args implements org.apache.thrift.TBase<result_args, result_args._Fields>, java.io.Serializable, Cloneable, Comparable<result_args> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("result_args");
+
+ private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField RESULT_FIELD_DESC = new org.apache.thrift.protocol.TField("result", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new result_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new result_argsTupleSchemeFactory());
+ }
+
+ private String id; // required
+ private String result; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ ID((short)1, "id"),
+ RESULT((short)2, "result");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+      * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // ID
+ return ID;
+ case 2: // RESULT
+ return RESULT;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+      * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.RESULT, new org.apache.thrift.meta_data.FieldMetaData("result", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(result_args.class, metaDataMap);
+ }
+
+ public result_args() {
+ }
+
+ public result_args(
+ String id,
+ String result)
+ {
+ this();
+ this.id = id;
+ this.result = result;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public result_args(result_args other) {
+ if (other.is_set_id()) {
+ this.id = other.id;
+ }
+ if (other.is_set_result()) {
+ this.result = other.result;
+ }
+ }
+
+ public result_args deepCopy() {
+ return new result_args(this);
+ }
+
+ @Override
+ public void clear() {
+ this.id = null;
+ this.result = null;
+ }
+
+ public String get_id() {
+ return this.id;
+ }
+
+ public void set_id(String id) {
+ this.id = id;
+ }
+
+ public void unset_id() {
+ this.id = null;
+ }
+
+ /** Returns true if field id is set (has been assigned a value) and false otherwise */
+ public boolean is_set_id() {
+ return this.id != null;
+ }
+
+ public void set_id_isSet(boolean value) {
+ if (!value) {
+ this.id = null;
+ }
+ }
+
+ public String get_result() {
+ return this.result;
+ }
+
+ public void set_result(String result) {
+ this.result = result;
+ }
+
+ public void unset_result() {
+ this.result = null;
+ }
+
+ /** Returns true if field result is set (has been assigned a value) and false otherwise */
+ public boolean is_set_result() {
+ return this.result != null;
+ }
+
+ public void set_result_isSet(boolean value) {
+ if (!value) {
+ this.result = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case ID:
+ if (value == null) {
+ unset_id();
+ } else {
+ set_id((String)value);
+ }
+ break;
+
+ case RESULT:
+ if (value == null) {
+ unset_result();
+ } else {
+ set_result((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case ID:
+ return get_id();
+
+ case RESULT:
+ return get_result();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case ID:
+ return is_set_id();
+ case RESULT:
+ return is_set_result();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof result_args)
+ return this.equals((result_args)that);
+ return false;
+ }
+
+ public boolean equals(result_args that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_id = true && this.is_set_id();
+ boolean that_present_id = true && that.is_set_id();
+ if (this_present_id || that_present_id) {
+ if (!(this_present_id && that_present_id))
+ return false;
+ if (!this.id.equals(that.id))
+ return false;
+ }
+
+ boolean this_present_result = true && this.is_set_result();
+ boolean that_present_result = true && that.is_set_result();
+ if (this_present_result || that_present_result) {
+ if (!(this_present_result && that_present_result))
+ return false;
+ if (!this.result.equals(that.result))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_id = true && (is_set_id());
+ list.add(present_id);
+ if (present_id)
+ list.add(id);
+
+ boolean present_result = true && (is_set_result());
+ list.add(present_result);
+ if (present_result)
+ list.add(result);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(result_args other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_id()).compareTo(other.is_set_id());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_id()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_result()).compareTo(other.is_set_result());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_result()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.result, other.result);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("result_args(");
+ boolean first = true;
+
+ sb.append("id:");
+ if (this.id == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.id);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("result:");
+ if (this.result == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.result);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class result_argsStandardSchemeFactory implements SchemeFactory {
+ public result_argsStandardScheme getScheme() {
+ return new result_argsStandardScheme();
+ }
+ }
+
+ private static class result_argsStandardScheme extends StandardScheme<result_args> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, result_args struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // ID
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.id = iprot.readString();
+ struct.set_id_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // RESULT
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.result = iprot.readString();
+ struct.set_result_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, result_args struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.id != null) {
+ oprot.writeFieldBegin(ID_FIELD_DESC);
+ oprot.writeString(struct.id);
+ oprot.writeFieldEnd();
+ }
+ if (struct.result != null) {
+ oprot.writeFieldBegin(RESULT_FIELD_DESC);
+ oprot.writeString(struct.result);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class result_argsTupleSchemeFactory implements SchemeFactory {
+ public result_argsTupleScheme getScheme() {
+ return new result_argsTupleScheme();
+ }
+ }
+
+ private static class result_argsTupleScheme extends TupleScheme<result_args> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, result_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_id()) {
+ optionals.set(0);
+ }
+ if (struct.is_set_result()) {
+ optionals.set(1);
+ }
+ oprot.writeBitSet(optionals, 2);
+ if (struct.is_set_id()) {
+ oprot.writeString(struct.id);
+ }
+ if (struct.is_set_result()) {
+ oprot.writeString(struct.result);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, result_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(2);
+ if (incoming.get(0)) {
+ struct.id = iprot.readString();
+ struct.set_id_isSet(true);
+ }
+ if (incoming.get(1)) {
+ struct.result = iprot.readString();
+ struct.set_result_isSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class result_result implements org.apache.thrift.TBase<result_result, result_result._Fields>, java.io.Serializable, Cloneable, Comparable<result_result> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("result_result");
+
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new result_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new result_resultTupleSchemeFactory());
+ }
+
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+;
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+      * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+      * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(result_result.class, metaDataMap);
+ }
+
+ public result_result() {
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public result_result(result_result other) {
+ }
+
+ public result_result deepCopy() {
+ return new result_result(this);
+ }
+
+ @Override
+ public void clear() {
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof result_result)
+ return this.equals((result_result)that);
+ return false;
+ }
+
+ public boolean equals(result_result that) {
+ if (that == null)
+ return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(result_result other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("result_result(");
+ boolean first = true;
+
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class result_resultStandardSchemeFactory implements SchemeFactory {
+ public result_resultStandardScheme getScheme() {
+ return new result_resultStandardScheme();
+ }
+ }
+
+ private static class result_resultStandardScheme extends StandardScheme<result_result> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, result_result struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, result_result struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class result_resultTupleSchemeFactory implements SchemeFactory {
+ public result_resultTupleScheme getScheme() {
+ return new result_resultTupleScheme();
+ }
+ }
+
+ private static class result_resultTupleScheme extends TupleScheme<result_result> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, result_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, result_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ }
+ }
+
+ }
+
+ public static class fetchRequest_args implements org.apache.thrift.TBase<fetchRequest_args, fetchRequest_args._Fields>, java.io.Serializable, Cloneable, Comparable<fetchRequest_args> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("fetchRequest_args");
+
+ private static final org.apache.thrift.protocol.TField FUNCTION_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("functionName", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new fetchRequest_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new fetchRequest_argsTupleSchemeFactory());
+ }
+
+ private String functionName; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ FUNCTION_NAME((short)1, "functionName");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+      * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // FUNCTION_NAME
+ return FUNCTION_NAME;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+      * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.FUNCTION_NAME, new org.apache.thrift.meta_data.FieldMetaData("functionName", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(fetchRequest_args.class, metaDataMap);
+ }
+
+ public fetchRequest_args() {
+ }
+
+ public fetchRequest_args(
+ String functionName)
+ {
+ this();
+ this.functionName = functionName;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public fetchRequest_args(fetchRequest_args other) {
+ if (other.is_set_functionName()) {
+ this.functionName = other.functionName;
+ }
+ }
+
+ public fetchRequest_args deepCopy() {
+ return new fetchRequest_args(this);
+ }
+
+ @Override
+ public void clear() {
+ this.functionName = null;
+ }
+
+ public String get_functionName() {
+ return this.functionName;
+ }
+
+ public void set_functionName(String functionName) {
+ this.functionName = functionName;
+ }
+
+ public void unset_functionName() {
+ this.functionName = null;
+ }
+
+ /** Returns true if field functionName is set (has been assigned a value) and false otherwise */
+ public boolean is_set_functionName() {
+ return this.functionName != null;
+ }
+
+ public void set_functionName_isSet(boolean value) {
+ if (!value) {
+ this.functionName = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case FUNCTION_NAME:
+ if (value == null) {
+ unset_functionName();
+ } else {
+ set_functionName((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case FUNCTION_NAME:
+ return get_functionName();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case FUNCTION_NAME:
+ return is_set_functionName();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof fetchRequest_args)
+ return this.equals((fetchRequest_args)that);
+ return false;
+ }
+
+ public boolean equals(fetchRequest_args that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_functionName = true && this.is_set_functionName();
+ boolean that_present_functionName = true && that.is_set_functionName();
+ if (this_present_functionName || that_present_functionName) {
+ if (!(this_present_functionName && that_present_functionName))
+ return false;
+ if (!this.functionName.equals(that.functionName))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_functionName = true && (is_set_functionName());
+ list.add(present_functionName);
+ if (present_functionName)
+ list.add(functionName);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(fetchRequest_args other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_functionName()).compareTo(other.is_set_functionName());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_functionName()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.functionName, other.functionName);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("fetchRequest_args(");
+ boolean first = true;
+
+ sb.append("functionName:");
+ if (this.functionName == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.functionName);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class fetchRequest_argsStandardSchemeFactory implements SchemeFactory {
+ public fetchRequest_argsStandardScheme getScheme() {
+ return new fetchRequest_argsStandardScheme();
+ }
+ }
+
+ private static class fetchRequest_argsStandardScheme extends StandardScheme<fetchRequest_args> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, fetchRequest_args struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // FUNCTION_NAME
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.functionName = iprot.readString();
+ struct.set_functionName_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, fetchRequest_args struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.functionName != null) {
+ oprot.writeFieldBegin(FUNCTION_NAME_FIELD_DESC);
+ oprot.writeString(struct.functionName);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class fetchRequest_argsTupleSchemeFactory implements SchemeFactory {
+ public fetchRequest_argsTupleScheme getScheme() {
+ return new fetchRequest_argsTupleScheme();
+ }
+ }
+
+ private static class fetchRequest_argsTupleScheme extends TupleScheme<fetchRequest_args> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, fetchRequest_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_functionName()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.is_set_functionName()) {
+ oprot.writeString(struct.functionName);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, fetchRequest_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.functionName = iprot.readString();
+ struct.set_functionName_isSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class fetchRequest_result implements org.apache.thrift.TBase<fetchRequest_result, fetchRequest_result._Fields>, java.io.Serializable, Cloneable, Comparable<fetchRequest_result> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("fetchRequest_result");
+
+ private static final org.apache.thrift.protocol.TField SUCCESS_FIELD_DESC = new org.apache.thrift.protocol.TField("success", org.apache.thrift.protocol.TType.STRUCT, (short)0);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new fetchRequest_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new fetchRequest_resultTupleSchemeFactory());
+ }
+
+ private DRPCRequest success; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ SUCCESS((short)0, "success");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+      * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 0: // SUCCESS
+ return SUCCESS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+      * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.SUCCESS, new org.apache.thrift.meta_data.FieldMetaData("success", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, DRPCRequest.class)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(fetchRequest_result.class, metaDataMap);
+ }
+
+ public fetchRequest_result() {
+ }
+
+ public fetchRequest_result(
+ DRPCRequest success)
+ {
+ this();
+ this.success = success;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public fetchRequest_result(fetchRequest_result other) {
+ if (other.is_set_success()) {
+ this.success = new DRPCRequest(other.success);
+ }
+ }
+
+ public fetchRequest_result deepCopy() {
+ return new fetchRequest_result(this);
+ }
+
+ @Override
+ public void clear() {
+ this.success = null;
+ }
+
+ public DRPCRequest get_success() {
+ return this.success;
+ }
+
+ public void set_success(DRPCRequest success) {
+ this.success = success;
+ }
+
+ public void unset_success() {
+ this.success = null;
+ }
+
+ /** Returns true if field success is set (has been assigned a value) and false otherwise */
+ public boolean is_set_success() {
+ return this.success != null;
+ }
+
+ public void set_success_isSet(boolean value) {
+ if (!value) {
+ this.success = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case SUCCESS:
+ if (value == null) {
+ unset_success();
+ } else {
+ set_success((DRPCRequest)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case SUCCESS:
+ return get_success();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case SUCCESS:
+ return is_set_success();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof fetchRequest_result)
+ return this.equals((fetchRequest_result)that);
+ return false;
+ }
+
+ public boolean equals(fetchRequest_result that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_success = true && this.is_set_success();
+ boolean that_present_success = true && that.is_set_success();
+ if (this_present_success || that_present_success) {
+ if (!(this_present_success && that_present_success))
+ return false;
+ if (!this.success.equals(that.success))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_success = true && (is_set_success());
+ list.add(present_success);
+ if (present_success)
+ list.add(success);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(fetchRequest_result other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_success()).compareTo(other.is_set_success());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_success()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.success, other.success);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("fetchRequest_result(");
+ boolean first = true;
+
+ sb.append("success:");
+ if (this.success == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.success);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ if (success != null) {
+ success.validate();
+ }
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class fetchRequest_resultStandardSchemeFactory implements SchemeFactory {
+ public fetchRequest_resultStandardScheme getScheme() {
+ return new fetchRequest_resultStandardScheme();
+ }
+ }
+
+ private static class fetchRequest_resultStandardScheme extends StandardScheme<fetchRequest_result> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, fetchRequest_result struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 0: // SUCCESS
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.success = new DRPCRequest();
+ struct.success.read(iprot);
+ struct.set_success_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, fetchRequest_result struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.success != null) {
+ oprot.writeFieldBegin(SUCCESS_FIELD_DESC);
+ struct.success.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class fetchRequest_resultTupleSchemeFactory implements SchemeFactory {
+ public fetchRequest_resultTupleScheme getScheme() {
+ return new fetchRequest_resultTupleScheme();
+ }
+ }
+
+ private static class fetchRequest_resultTupleScheme extends TupleScheme<fetchRequest_result> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, fetchRequest_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_success()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.is_set_success()) {
+ struct.success.write(oprot);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, fetchRequest_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.success = new DRPCRequest();
+ struct.success.read(iprot);
+ struct.set_success_isSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class failRequest_args implements org.apache.thrift.TBase<failRequest_args, failRequest_args._Fields>, java.io.Serializable, Cloneable, Comparable<failRequest_args> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("failRequest_args");
+
+ private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new failRequest_argsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new failRequest_argsTupleSchemeFactory());
+ }
+
+ private String id; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ ID((short)1, "id");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+      * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // ID
+ return ID;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+      * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(failRequest_args.class, metaDataMap);
+ }
+
+ public failRequest_args() {
+ }
+
+ public failRequest_args(
+ String id)
+ {
+ this();
+ this.id = id;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public failRequest_args(failRequest_args other) {
+ if (other.is_set_id()) {
+ this.id = other.id;
+ }
+ }
+
+ public failRequest_args deepCopy() {
+ return new failRequest_args(this);
+ }
+
+ @Override
+ public void clear() {
+ this.id = null;
+ }
+
+ public String get_id() {
+ return this.id;
+ }
+
+ public void set_id(String id) {
+ this.id = id;
+ }
+
+ public void unset_id() {
+ this.id = null;
+ }
+
+ /** Returns true if field id is set (has been assigned a value) and false otherwise */
+ public boolean is_set_id() {
+ return this.id != null;
+ }
+
+ public void set_id_isSet(boolean value) {
+ if (!value) {
+ this.id = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case ID:
+ if (value == null) {
+ unset_id();
+ } else {
+ set_id((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case ID:
+ return get_id();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case ID:
+ return is_set_id();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof failRequest_args)
+ return this.equals((failRequest_args)that);
+ return false;
+ }
+
+ public boolean equals(failRequest_args that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_id = true && this.is_set_id();
+ boolean that_present_id = true && that.is_set_id();
+ if (this_present_id || that_present_id) {
+ if (!(this_present_id && that_present_id))
+ return false;
+ if (!this.id.equals(that.id))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_id = true && (is_set_id());
+ list.add(present_id);
+ if (present_id)
+ list.add(id);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(failRequest_args other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_id()).compareTo(other.is_set_id());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_id()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("failRequest_args(");
+ boolean first = true;
+
+ sb.append("id:");
+ if (this.id == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.id);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class failRequest_argsStandardSchemeFactory implements SchemeFactory {
+ public failRequest_argsStandardScheme getScheme() {
+ return new failRequest_argsStandardScheme();
+ }
+ }
+
+ private static class failRequest_argsStandardScheme extends StandardScheme<failRequest_args> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, failRequest_args struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // ID
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.id = iprot.readString();
+ struct.set_id_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, failRequest_args struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.id != null) {
+ oprot.writeFieldBegin(ID_FIELD_DESC);
+ oprot.writeString(struct.id);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class failRequest_argsTupleSchemeFactory implements SchemeFactory {
+ public failRequest_argsTupleScheme getScheme() {
+ return new failRequest_argsTupleScheme();
+ }
+ }
+
+ private static class failRequest_argsTupleScheme extends TupleScheme<failRequest_args> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, failRequest_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ BitSet optionals = new BitSet();
+ if (struct.is_set_id()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.is_set_id()) {
+ oprot.writeString(struct.id);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, failRequest_args struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.id = iprot.readString();
+ struct.set_id_isSet(true);
+ }
+ }
+ }
+
+ }
+
+ public static class failRequest_result implements org.apache.thrift.TBase<failRequest_result, failRequest_result._Fields>, java.io.Serializable, Cloneable, Comparable<failRequest_result> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("failRequest_result");
+
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new failRequest_resultStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new failRequest_resultTupleSchemeFactory());
+ }
+
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+;
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(failRequest_result.class, metaDataMap);
+ }
+
+ public failRequest_result() {
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public failRequest_result(failRequest_result other) {
+ }
+
+ public failRequest_result deepCopy() {
+ return new failRequest_result(this);
+ }
+
+ @Override
+ public void clear() {
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof failRequest_result)
+ return this.equals((failRequest_result)that);
+ return false;
+ }
+
+ public boolean equals(failRequest_result that) {
+ if (that == null)
+ return false;
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(failRequest_result other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("failRequest_result(");
+ boolean first = true;
+
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class failRequest_resultStandardSchemeFactory implements SchemeFactory {
+ public failRequest_resultStandardScheme getScheme() {
+ return new failRequest_resultStandardScheme();
+ }
+ }
+
+ private static class failRequest_resultStandardScheme extends StandardScheme<failRequest_result> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, failRequest_result struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, failRequest_result struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class failRequest_resultTupleSchemeFactory implements SchemeFactory {
+ public failRequest_resultTupleScheme getScheme() {
+ return new failRequest_resultTupleScheme();
+ }
+ }
+
+ private static class failRequest_resultTupleScheme extends TupleScheme<failRequest_result> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, failRequest_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, failRequest_result struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ }
+ }
+
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TopologyAssignException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TopologyAssignException.java b/jstorm-client/src/main/java/backtype/storm/generated/TopologyAssignException.java
deleted file mode 100644
index 2907dab..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TopologyAssignException.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TopologyAssignException extends Exception implements org.apache.thrift7.TBase<TopologyAssignException, TopologyAssignException._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TopologyAssignException");
-
- private static final org.apache.thrift7.protocol.TField MSG_FIELD_DESC = new org.apache.thrift7.protocol.TField("msg", org.apache.thrift7.protocol.TType.STRING, (short)1);
-
- private String msg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- MSG((short)1, "msg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // MSG
- return MSG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.MSG, new org.apache.thrift7.meta_data.FieldMetaData("msg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TopologyAssignException.class, metaDataMap);
- }
-
- public TopologyAssignException() {
- }
-
- public TopologyAssignException(
- String msg)
- {
- this();
- this.msg = msg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TopologyAssignException(TopologyAssignException other) {
- if (other.is_set_msg()) {
- this.msg = other.msg;
- }
- }
-
- public TopologyAssignException deepCopy() {
- return new TopologyAssignException(this);
- }
-
- @Override
- public void clear() {
- this.msg = null;
- }
-
- public String get_msg() {
- return this.msg;
- }
-
- public void set_msg(String msg) {
- this.msg = msg;
- }
-
- public void unset_msg() {
- this.msg = null;
- }
-
- /** Returns true if field msg is set (has been assigned a value) and false otherwise */
- public boolean is_set_msg() {
- return this.msg != null;
- }
-
- public void set_msg_isSet(boolean value) {
- if (!value) {
- this.msg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case MSG:
- if (value == null) {
- unset_msg();
- } else {
- set_msg((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case MSG:
- return get_msg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case MSG:
- return is_set_msg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TopologyAssignException)
- return this.equals((TopologyAssignException)that);
- return false;
- }
-
- public boolean equals(TopologyAssignException that) {
- if (that == null)
- return false;
-
- boolean this_present_msg = true && this.is_set_msg();
- boolean that_present_msg = true && that.is_set_msg();
- if (this_present_msg || that_present_msg) {
- if (!(this_present_msg && that_present_msg))
- return false;
- if (!this.msg.equals(that.msg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_msg = true && (is_set_msg());
- builder.append(present_msg);
- if (present_msg)
- builder.append(msg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TopologyAssignException other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TopologyAssignException typedOther = (TopologyAssignException)other;
-
- lastComparison = Boolean.valueOf(is_set_msg()).compareTo(typedOther.is_set_msg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_msg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.msg, typedOther.msg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // MSG
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.msg = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.msg != null) {
- oprot.writeFieldBegin(MSG_FIELD_DESC);
- oprot.writeString(this.msg);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TopologyAssignException(");
- boolean first = true;
-
- sb.append("msg:");
- if (this.msg == null) {
- sb.append("null");
- } else {
- sb.append(this.msg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_msg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TopologyInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TopologyInfo.java b/jstorm-client/src/main/java/backtype/storm/generated/TopologyInfo.java
deleted file mode 100644
index a2566dc..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TopologyInfo.java
+++ /dev/null
@@ -1,1022 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TopologyInfo implements org.apache.thrift7.TBase<TopologyInfo, TopologyInfo._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TopologyInfo");
-
- private static final org.apache.thrift7.protocol.TField ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("id", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("name", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField UPTIME_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("uptime_secs", org.apache.thrift7.protocol.TType.I32, (short)3);
- private static final org.apache.thrift7.protocol.TField WORKERS_FIELD_DESC = new org.apache.thrift7.protocol.TField("workers", org.apache.thrift7.protocol.TType.LIST, (short)4);
- private static final org.apache.thrift7.protocol.TField STATUS_FIELD_DESC = new org.apache.thrift7.protocol.TField("status", org.apache.thrift7.protocol.TType.STRING, (short)5);
- private static final org.apache.thrift7.protocol.TField TASKS_FIELD_DESC = new org.apache.thrift7.protocol.TField("tasks", org.apache.thrift7.protocol.TType.LIST, (short)6);
- private static final org.apache.thrift7.protocol.TField USER_DEF_METRIC_FIELD_DESC = new org.apache.thrift7.protocol.TField("userDefMetric", org.apache.thrift7.protocol.TType.LIST, (short)7);
-
- private String id; // required
- private String name; // required
- private int uptime_secs; // required
- private List<WorkerSummary> workers; // required
- private String status; // required
- private List<TaskSummary> tasks; // required
- private List<UserDefMetric> userDefMetric; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ID((short)1, "id"),
- NAME((short)2, "name"),
- UPTIME_SECS((short)3, "uptime_secs"),
- WORKERS((short)4, "workers"),
- STATUS((short)5, "status"),
- TASKS((short)6, "tasks"),
- USER_DEF_METRIC((short)7, "userDefMetric");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ID
- return ID;
- case 2: // NAME
- return NAME;
- case 3: // UPTIME_SECS
- return UPTIME_SECS;
- case 4: // WORKERS
- return WORKERS;
- case 5: // STATUS
- return STATUS;
- case 6: // TASKS
- return TASKS;
- case 7: // USER_DEF_METRIC
- return USER_DEF_METRIC;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __UPTIME_SECS_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ID, new org.apache.thrift7.meta_data.FieldMetaData("id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.NAME, new org.apache.thrift7.meta_data.FieldMetaData("name", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.UPTIME_SECS, new org.apache.thrift7.meta_data.FieldMetaData("uptime_secs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.WORKERS, new org.apache.thrift7.meta_data.FieldMetaData("workers", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, WorkerSummary.class))));
- tmpMap.put(_Fields.STATUS, new org.apache.thrift7.meta_data.FieldMetaData("status", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.TASKS, new org.apache.thrift7.meta_data.FieldMetaData("tasks", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, TaskSummary.class))));
- tmpMap.put(_Fields.USER_DEF_METRIC, new org.apache.thrift7.meta_data.FieldMetaData("userDefMetric", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, UserDefMetric.class))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TopologyInfo.class, metaDataMap);
- }
-
- public TopologyInfo() {
- }
-
- public TopologyInfo(
- String id,
- String name,
- int uptime_secs,
- List<WorkerSummary> workers,
- String status,
- List<TaskSummary> tasks)
- {
- this();
- this.id = id;
- this.name = name;
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- this.workers = workers;
- this.status = status;
- this.tasks = tasks;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TopologyInfo(TopologyInfo other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_id()) {
- this.id = other.id;
- }
- if (other.is_set_name()) {
- this.name = other.name;
- }
- this.uptime_secs = other.uptime_secs;
- if (other.is_set_workers()) {
- List<WorkerSummary> __this__workers = new ArrayList<WorkerSummary>();
- for (WorkerSummary other_element : other.workers) {
- __this__workers.add(new WorkerSummary(other_element));
- }
- this.workers = __this__workers;
- }
- if (other.is_set_status()) {
- this.status = other.status;
- }
- if (other.is_set_tasks()) {
- List<TaskSummary> __this__tasks = new ArrayList<TaskSummary>();
- for (TaskSummary other_element : other.tasks) {
- __this__tasks.add(new TaskSummary(other_element));
- }
- this.tasks = __this__tasks;
- }
- if (other.is_set_userDefMetric()) {
- List<UserDefMetric> __this__userDefMetric = new ArrayList<UserDefMetric>();
- for (UserDefMetric other_element : other.userDefMetric) {
- __this__userDefMetric.add(new UserDefMetric(other_element));
- }
- this.userDefMetric = __this__userDefMetric;
- }
- }
-
- public TopologyInfo deepCopy() {
- return new TopologyInfo(this);
- }
-
- @Override
- public void clear() {
- this.id = null;
- this.name = null;
- set_uptime_secs_isSet(false);
- this.uptime_secs = 0;
- this.workers = null;
- this.status = null;
- this.tasks = null;
- this.userDefMetric = null;
- }
-
- public String get_id() {
- return this.id;
- }
-
- public void set_id(String id) {
- this.id = id;
- }
-
- public void unset_id() {
- this.id = null;
- }
-
- /** Returns true if field id is set (has been assigned a value) and false otherwise */
- public boolean is_set_id() {
- return this.id != null;
- }
-
- public void set_id_isSet(boolean value) {
- if (!value) {
- this.id = null;
- }
- }
-
- public String get_name() {
- return this.name;
- }
-
- public void set_name(String name) {
- this.name = name;
- }
-
- public void unset_name() {
- this.name = null;
- }
-
- /** Returns true if field name is set (has been assigned a value) and false otherwise */
- public boolean is_set_name() {
- return this.name != null;
- }
-
- public void set_name_isSet(boolean value) {
- if (!value) {
- this.name = null;
- }
- }
-
- public int get_uptime_secs() {
- return this.uptime_secs;
- }
-
- public void set_uptime_secs(int uptime_secs) {
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- }
-
- public void unset_uptime_secs() {
- __isset_bit_vector.clear(__UPTIME_SECS_ISSET_ID);
- }
-
- /** Returns true if field uptime_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_uptime_secs() {
- return __isset_bit_vector.get(__UPTIME_SECS_ISSET_ID);
- }
-
- public void set_uptime_secs_isSet(boolean value) {
- __isset_bit_vector.set(__UPTIME_SECS_ISSET_ID, value);
- }
-
- public int get_workers_size() {
- return (this.workers == null) ? 0 : this.workers.size();
- }
-
- public java.util.Iterator<WorkerSummary> get_workers_iterator() {
- return (this.workers == null) ? null : this.workers.iterator();
- }
-
- public void add_to_workers(WorkerSummary elem) {
- if (this.workers == null) {
- this.workers = new ArrayList<WorkerSummary>();
- }
- this.workers.add(elem);
- }
-
- public List<WorkerSummary> get_workers() {
- return this.workers;
- }
-
- public void set_workers(List<WorkerSummary> workers) {
- this.workers = workers;
- }
-
- public void unset_workers() {
- this.workers = null;
- }
-
- /** Returns true if field workers is set (has been assigned a value) and false otherwise */
- public boolean is_set_workers() {
- return this.workers != null;
- }
-
- public void set_workers_isSet(boolean value) {
- if (!value) {
- this.workers = null;
- }
- }
-
- public String get_status() {
- return this.status;
- }
-
- public void set_status(String status) {
- this.status = status;
- }
-
- public void unset_status() {
- this.status = null;
- }
-
- /** Returns true if field status is set (has been assigned a value) and false otherwise */
- public boolean is_set_status() {
- return this.status != null;
- }
-
- public void set_status_isSet(boolean value) {
- if (!value) {
- this.status = null;
- }
- }
-
- public int get_tasks_size() {
- return (this.tasks == null) ? 0 : this.tasks.size();
- }
-
- public java.util.Iterator<TaskSummary> get_tasks_iterator() {
- return (this.tasks == null) ? null : this.tasks.iterator();
- }
-
- public void add_to_tasks(TaskSummary elem) {
- if (this.tasks == null) {
- this.tasks = new ArrayList<TaskSummary>();
- }
- this.tasks.add(elem);
- }
-
- public List<TaskSummary> get_tasks() {
- return this.tasks;
- }
-
- public void set_tasks(List<TaskSummary> tasks) {
- this.tasks = tasks;
- }
-
- public void unset_tasks() {
- this.tasks = null;
- }
-
- /** Returns true if field tasks is set (has been assigned a value) and false otherwise */
- public boolean is_set_tasks() {
- return this.tasks != null;
- }
-
- public void set_tasks_isSet(boolean value) {
- if (!value) {
- this.tasks = null;
- }
- }
-
- public int get_userDefMetric_size() {
- return (this.userDefMetric == null) ? 0 : this.userDefMetric.size();
- }
-
- public java.util.Iterator<UserDefMetric> get_userDefMetric_iterator() {
- return (this.userDefMetric == null) ? null : this.userDefMetric.iterator();
- }
-
- public void add_to_userDefMetric(UserDefMetric elem) {
- if (this.userDefMetric == null) {
- this.userDefMetric = new ArrayList<UserDefMetric>();
- }
- this.userDefMetric.add(elem);
- }
-
- public List<UserDefMetric> get_userDefMetric() {
- return this.userDefMetric;
- }
-
- public void set_userDefMetric(List<UserDefMetric> userDefMetric) {
- this.userDefMetric = userDefMetric;
- }
-
- public void unset_userDefMetric() {
- this.userDefMetric = null;
- }
-
- /** Returns true if field userDefMetric is set (has been assigned a value) and false otherwise */
- public boolean is_set_userDefMetric() {
- return this.userDefMetric != null;
- }
-
- public void set_userDefMetric_isSet(boolean value) {
- if (!value) {
- this.userDefMetric = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ID:
- if (value == null) {
- unset_id();
- } else {
- set_id((String)value);
- }
- break;
-
- case NAME:
- if (value == null) {
- unset_name();
- } else {
- set_name((String)value);
- }
- break;
-
- case UPTIME_SECS:
- if (value == null) {
- unset_uptime_secs();
- } else {
- set_uptime_secs((Integer)value);
- }
- break;
-
- case WORKERS:
- if (value == null) {
- unset_workers();
- } else {
- set_workers((List<WorkerSummary>)value);
- }
- break;
-
- case STATUS:
- if (value == null) {
- unset_status();
- } else {
- set_status((String)value);
- }
- break;
-
- case TASKS:
- if (value == null) {
- unset_tasks();
- } else {
- set_tasks((List<TaskSummary>)value);
- }
- break;
-
- case USER_DEF_METRIC:
- if (value == null) {
- unset_userDefMetric();
- } else {
- set_userDefMetric((List<UserDefMetric>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ID:
- return get_id();
-
- case NAME:
- return get_name();
-
- case UPTIME_SECS:
- return Integer.valueOf(get_uptime_secs());
-
- case WORKERS:
- return get_workers();
-
- case STATUS:
- return get_status();
-
- case TASKS:
- return get_tasks();
-
- case USER_DEF_METRIC:
- return get_userDefMetric();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ID:
- return is_set_id();
- case NAME:
- return is_set_name();
- case UPTIME_SECS:
- return is_set_uptime_secs();
- case WORKERS:
- return is_set_workers();
- case STATUS:
- return is_set_status();
- case TASKS:
- return is_set_tasks();
- case USER_DEF_METRIC:
- return is_set_userDefMetric();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TopologyInfo)
- return this.equals((TopologyInfo)that);
- return false;
- }
-
- public boolean equals(TopologyInfo that) {
- if (that == null)
- return false;
-
- boolean this_present_id = true && this.is_set_id();
- boolean that_present_id = true && that.is_set_id();
- if (this_present_id || that_present_id) {
- if (!(this_present_id && that_present_id))
- return false;
- if (!this.id.equals(that.id))
- return false;
- }
-
- boolean this_present_name = true && this.is_set_name();
- boolean that_present_name = true && that.is_set_name();
- if (this_present_name || that_present_name) {
- if (!(this_present_name && that_present_name))
- return false;
- if (!this.name.equals(that.name))
- return false;
- }
-
- boolean this_present_uptime_secs = true;
- boolean that_present_uptime_secs = true;
- if (this_present_uptime_secs || that_present_uptime_secs) {
- if (!(this_present_uptime_secs && that_present_uptime_secs))
- return false;
- if (this.uptime_secs != that.uptime_secs)
- return false;
- }
-
- boolean this_present_workers = true && this.is_set_workers();
- boolean that_present_workers = true && that.is_set_workers();
- if (this_present_workers || that_present_workers) {
- if (!(this_present_workers && that_present_workers))
- return false;
- if (!this.workers.equals(that.workers))
- return false;
- }
-
- boolean this_present_status = true && this.is_set_status();
- boolean that_present_status = true && that.is_set_status();
- if (this_present_status || that_present_status) {
- if (!(this_present_status && that_present_status))
- return false;
- if (!this.status.equals(that.status))
- return false;
- }
-
- boolean this_present_tasks = true && this.is_set_tasks();
- boolean that_present_tasks = true && that.is_set_tasks();
- if (this_present_tasks || that_present_tasks) {
- if (!(this_present_tasks && that_present_tasks))
- return false;
- if (!this.tasks.equals(that.tasks))
- return false;
- }
-
- boolean this_present_userDefMetric = true && this.is_set_userDefMetric();
- boolean that_present_userDefMetric = true && that.is_set_userDefMetric();
- if (this_present_userDefMetric || that_present_userDefMetric) {
- if (!(this_present_userDefMetric && that_present_userDefMetric))
- return false;
- if (!this.userDefMetric.equals(that.userDefMetric))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_id = true && (is_set_id());
- builder.append(present_id);
- if (present_id)
- builder.append(id);
-
- boolean present_name = true && (is_set_name());
- builder.append(present_name);
- if (present_name)
- builder.append(name);
-
- boolean present_uptime_secs = true;
- builder.append(present_uptime_secs);
- if (present_uptime_secs)
- builder.append(uptime_secs);
-
- boolean present_workers = true && (is_set_workers());
- builder.append(present_workers);
- if (present_workers)
- builder.append(workers);
-
- boolean present_status = true && (is_set_status());
- builder.append(present_status);
- if (present_status)
- builder.append(status);
-
- boolean present_tasks = true && (is_set_tasks());
- builder.append(present_tasks);
- if (present_tasks)
- builder.append(tasks);
-
- boolean present_userDefMetric = true && (is_set_userDefMetric());
- builder.append(present_userDefMetric);
- if (present_userDefMetric)
- builder.append(userDefMetric);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TopologyInfo other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TopologyInfo typedOther = (TopologyInfo)other;
-
- lastComparison = Boolean.valueOf(is_set_id()).compareTo(typedOther.is_set_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.id, typedOther.id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_name()).compareTo(typedOther.is_set_name());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_name()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.name, typedOther.name);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_uptime_secs()).compareTo(typedOther.is_set_uptime_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_uptime_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.uptime_secs, typedOther.uptime_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_workers()).compareTo(typedOther.is_set_workers());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_workers()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.workers, typedOther.workers);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_status()).compareTo(typedOther.is_set_status());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_status()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.status, typedOther.status);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_tasks()).compareTo(typedOther.is_set_tasks());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_tasks()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.tasks, typedOther.tasks);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_userDefMetric()).compareTo(typedOther.is_set_userDefMetric());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_userDefMetric()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.userDefMetric, typedOther.userDefMetric);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // NAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.name = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // UPTIME_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.uptime_secs = iprot.readI32();
- set_uptime_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // WORKERS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list193 = iprot.readListBegin();
- this.workers = new ArrayList<WorkerSummary>(_list193.size);
- for (int _i194 = 0; _i194 < _list193.size; ++_i194)
- {
- WorkerSummary _elem195; // required
- _elem195 = new WorkerSummary();
- _elem195.read(iprot);
- this.workers.add(_elem195);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // STATUS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.status = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 6: // TASKS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list196 = iprot.readListBegin();
- this.tasks = new ArrayList<TaskSummary>(_list196.size);
- for (int _i197 = 0; _i197 < _list196.size; ++_i197)
- {
- TaskSummary _elem198; // required
- _elem198 = new TaskSummary();
- _elem198.read(iprot);
- this.tasks.add(_elem198);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 7: // USER_DEF_METRIC
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list199 = iprot.readListBegin();
- this.userDefMetric = new ArrayList<UserDefMetric>(_list199.size);
- for (int _i200 = 0; _i200 < _list199.size; ++_i200)
- {
- UserDefMetric _elem201; // required
- _elem201 = new UserDefMetric();
- _elem201.read(iprot);
- this.userDefMetric.add(_elem201);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.id != null) {
- oprot.writeFieldBegin(ID_FIELD_DESC);
- oprot.writeString(this.id);
- oprot.writeFieldEnd();
- }
- if (this.name != null) {
- oprot.writeFieldBegin(NAME_FIELD_DESC);
- oprot.writeString(this.name);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(UPTIME_SECS_FIELD_DESC);
- oprot.writeI32(this.uptime_secs);
- oprot.writeFieldEnd();
- if (this.workers != null) {
- oprot.writeFieldBegin(WORKERS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.workers.size()));
- for (WorkerSummary _iter202 : this.workers)
- {
- _iter202.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.status != null) {
- oprot.writeFieldBegin(STATUS_FIELD_DESC);
- oprot.writeString(this.status);
- oprot.writeFieldEnd();
- }
- if (this.tasks != null) {
- oprot.writeFieldBegin(TASKS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.tasks.size()));
- for (TaskSummary _iter203 : this.tasks)
- {
- _iter203.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.userDefMetric != null) {
- if (is_set_userDefMetric()) {
- oprot.writeFieldBegin(USER_DEF_METRIC_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.userDefMetric.size()));
- for (UserDefMetric _iter204 : this.userDefMetric)
- {
- _iter204.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TopologyInfo(");
- boolean first = true;
-
- sb.append("id:");
- if (this.id == null) {
- sb.append("null");
- } else {
- sb.append(this.id);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("name:");
- if (this.name == null) {
- sb.append("null");
- } else {
- sb.append(this.name);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("uptime_secs:");
- sb.append(this.uptime_secs);
- first = false;
- if (!first) sb.append(", ");
- sb.append("workers:");
- if (this.workers == null) {
- sb.append("null");
- } else {
- sb.append(this.workers);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("status:");
- if (this.status == null) {
- sb.append("null");
- } else {
- sb.append(this.status);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("tasks:");
- if (this.tasks == null) {
- sb.append("null");
- } else {
- sb.append(this.tasks);
- }
- first = false;
- if (is_set_userDefMetric()) {
- if (!first) sb.append(", ");
- sb.append("userDefMetric:");
- if (this.userDefMetric == null) {
- sb.append("null");
- } else {
- sb.append(this.userDefMetric);
- }
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'id' is unset! Struct:" + toString());
- }
-
- if (!is_set_name()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'name' is unset! Struct:" + toString());
- }
-
- if (!is_set_uptime_secs()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'uptime_secs' is unset! Struct:" + toString());
- }
-
- if (!is_set_workers()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'workers' is unset! Struct:" + toString());
- }
-
- if (!is_set_status()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'status' is unset! Struct:" + toString());
- }
-
- if (!is_set_tasks()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'tasks' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TopologyInitialStatus.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TopologyInitialStatus.java b/jstorm-client/src/main/java/backtype/storm/generated/TopologyInitialStatus.java
deleted file mode 100644
index 2c97dd7..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TopologyInitialStatus.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-
-import java.util.Map;
-import java.util.HashMap;
-import org.apache.thrift7.TEnum;
-
-public enum TopologyInitialStatus implements org.apache.thrift7.TEnum {
- ACTIVE(1),
- INACTIVE(2);
-
- private final int value;
-
- private TopologyInitialStatus(int value) {
- this.value = value;
- }
-
- /**
- * Get the integer value of this enum value, as defined in the Thrift IDL.
- */
- public int getValue() {
- return value;
- }
-
- /**
- * Find a the enum type by its integer value, as defined in the Thrift IDL.
- * @return null if the value is not found.
- */
- public static TopologyInitialStatus findByValue(int value) {
- switch (value) {
- case 1:
- return ACTIVE;
- case 2:
- return INACTIVE;
- default:
- return null;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TopologyMetricInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TopologyMetricInfo.java b/jstorm-client/src/main/java/backtype/storm/generated/TopologyMetricInfo.java
deleted file mode 100644
index 9d584ec..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TopologyMetricInfo.java
+++ /dev/null
@@ -1,594 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TopologyMetricInfo implements org.apache.thrift7.TBase<TopologyMetricInfo, TopologyMetricInfo._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TopologyMetricInfo");
-
- private static final org.apache.thrift7.protocol.TField TOPOLOGY_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("topology_id", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField TASK_METRIC_LIST_FIELD_DESC = new org.apache.thrift7.protocol.TField("task_metric_list", org.apache.thrift7.protocol.TType.LIST, (short)2);
- private static final org.apache.thrift7.protocol.TField WORKER_METRIC_LIST_FIELD_DESC = new org.apache.thrift7.protocol.TField("worker_metric_list", org.apache.thrift7.protocol.TType.LIST, (short)3);
-
- private String topology_id; // required
- private List<TaskMetricData> task_metric_list; // required
- private List<WorkerMetricData> worker_metric_list; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- TOPOLOGY_ID((short)1, "topology_id"),
- TASK_METRIC_LIST((short)2, "task_metric_list"),
- WORKER_METRIC_LIST((short)3, "worker_metric_list");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // TOPOLOGY_ID
- return TOPOLOGY_ID;
- case 2: // TASK_METRIC_LIST
- return TASK_METRIC_LIST;
- case 3: // WORKER_METRIC_LIST
- return WORKER_METRIC_LIST;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TOPOLOGY_ID, new org.apache.thrift7.meta_data.FieldMetaData("topology_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.TASK_METRIC_LIST, new org.apache.thrift7.meta_data.FieldMetaData("task_metric_list", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, TaskMetricData.class))));
- tmpMap.put(_Fields.WORKER_METRIC_LIST, new org.apache.thrift7.meta_data.FieldMetaData("worker_metric_list", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, WorkerMetricData.class))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TopologyMetricInfo.class, metaDataMap);
- }
-
- public TopologyMetricInfo() {
- }
-
- public TopologyMetricInfo(
- String topology_id)
- {
- this();
- this.topology_id = topology_id;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TopologyMetricInfo(TopologyMetricInfo other) {
- if (other.is_set_topology_id()) {
- this.topology_id = other.topology_id;
- }
- if (other.is_set_task_metric_list()) {
- List<TaskMetricData> __this__task_metric_list = new ArrayList<TaskMetricData>();
- for (TaskMetricData other_element : other.task_metric_list) {
- __this__task_metric_list.add(new TaskMetricData(other_element));
- }
- this.task_metric_list = __this__task_metric_list;
- }
- if (other.is_set_worker_metric_list()) {
- List<WorkerMetricData> __this__worker_metric_list = new ArrayList<WorkerMetricData>();
- for (WorkerMetricData other_element : other.worker_metric_list) {
- __this__worker_metric_list.add(new WorkerMetricData(other_element));
- }
- this.worker_metric_list = __this__worker_metric_list;
- }
- }
-
- public TopologyMetricInfo deepCopy() {
- return new TopologyMetricInfo(this);
- }
-
- @Override
- public void clear() {
- this.topology_id = null;
- this.task_metric_list = null;
- this.worker_metric_list = null;
- }
-
- public String get_topology_id() {
- return this.topology_id;
- }
-
- public void set_topology_id(String topology_id) {
- this.topology_id = topology_id;
- }
-
- public void unset_topology_id() {
- this.topology_id = null;
- }
-
- /** Returns true if field topology_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_topology_id() {
- return this.topology_id != null;
- }
-
- public void set_topology_id_isSet(boolean value) {
- if (!value) {
- this.topology_id = null;
- }
- }
-
- public int get_task_metric_list_size() {
- return (this.task_metric_list == null) ? 0 : this.task_metric_list.size();
- }
-
- public java.util.Iterator<TaskMetricData> get_task_metric_list_iterator() {
- return (this.task_metric_list == null) ? null : this.task_metric_list.iterator();
- }
-
- public void add_to_task_metric_list(TaskMetricData elem) {
- if (this.task_metric_list == null) {
- this.task_metric_list = new ArrayList<TaskMetricData>();
- }
- this.task_metric_list.add(elem);
- }
-
- public List<TaskMetricData> get_task_metric_list() {
- return this.task_metric_list;
- }
-
- public void set_task_metric_list(List<TaskMetricData> task_metric_list) {
- this.task_metric_list = task_metric_list;
- }
-
- public void unset_task_metric_list() {
- this.task_metric_list = null;
- }
-
- /** Returns true if field task_metric_list is set (has been assigned a value) and false otherwise */
- public boolean is_set_task_metric_list() {
- return this.task_metric_list != null;
- }
-
- public void set_task_metric_list_isSet(boolean value) {
- if (!value) {
- this.task_metric_list = null;
- }
- }
-
- public int get_worker_metric_list_size() {
- return (this.worker_metric_list == null) ? 0 : this.worker_metric_list.size();
- }
-
- public java.util.Iterator<WorkerMetricData> get_worker_metric_list_iterator() {
- return (this.worker_metric_list == null) ? null : this.worker_metric_list.iterator();
- }
-
- public void add_to_worker_metric_list(WorkerMetricData elem) {
- if (this.worker_metric_list == null) {
- this.worker_metric_list = new ArrayList<WorkerMetricData>();
- }
- this.worker_metric_list.add(elem);
- }
-
- public List<WorkerMetricData> get_worker_metric_list() {
- return this.worker_metric_list;
- }
-
- public void set_worker_metric_list(List<WorkerMetricData> worker_metric_list) {
- this.worker_metric_list = worker_metric_list;
- }
-
- public void unset_worker_metric_list() {
- this.worker_metric_list = null;
- }
-
- /** Returns true if field worker_metric_list is set (has been assigned a value) and false otherwise */
- public boolean is_set_worker_metric_list() {
- return this.worker_metric_list != null;
- }
-
- public void set_worker_metric_list_isSet(boolean value) {
- if (!value) {
- this.worker_metric_list = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case TOPOLOGY_ID:
- if (value == null) {
- unset_topology_id();
- } else {
- set_topology_id((String)value);
- }
- break;
-
- case TASK_METRIC_LIST:
- if (value == null) {
- unset_task_metric_list();
- } else {
- set_task_metric_list((List<TaskMetricData>)value);
- }
- break;
-
- case WORKER_METRIC_LIST:
- if (value == null) {
- unset_worker_metric_list();
- } else {
- set_worker_metric_list((List<WorkerMetricData>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case TOPOLOGY_ID:
- return get_topology_id();
-
- case TASK_METRIC_LIST:
- return get_task_metric_list();
-
- case WORKER_METRIC_LIST:
- return get_worker_metric_list();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case TOPOLOGY_ID:
- return is_set_topology_id();
- case TASK_METRIC_LIST:
- return is_set_task_metric_list();
- case WORKER_METRIC_LIST:
- return is_set_worker_metric_list();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TopologyMetricInfo)
- return this.equals((TopologyMetricInfo)that);
- return false;
- }
-
- public boolean equals(TopologyMetricInfo that) {
- if (that == null)
- return false;
-
- boolean this_present_topology_id = true && this.is_set_topology_id();
- boolean that_present_topology_id = true && that.is_set_topology_id();
- if (this_present_topology_id || that_present_topology_id) {
- if (!(this_present_topology_id && that_present_topology_id))
- return false;
- if (!this.topology_id.equals(that.topology_id))
- return false;
- }
-
- boolean this_present_task_metric_list = true && this.is_set_task_metric_list();
- boolean that_present_task_metric_list = true && that.is_set_task_metric_list();
- if (this_present_task_metric_list || that_present_task_metric_list) {
- if (!(this_present_task_metric_list && that_present_task_metric_list))
- return false;
- if (!this.task_metric_list.equals(that.task_metric_list))
- return false;
- }
-
- boolean this_present_worker_metric_list = true && this.is_set_worker_metric_list();
- boolean that_present_worker_metric_list = true && that.is_set_worker_metric_list();
- if (this_present_worker_metric_list || that_present_worker_metric_list) {
- if (!(this_present_worker_metric_list && that_present_worker_metric_list))
- return false;
- if (!this.worker_metric_list.equals(that.worker_metric_list))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_topology_id = true && (is_set_topology_id());
- builder.append(present_topology_id);
- if (present_topology_id)
- builder.append(topology_id);
-
- boolean present_task_metric_list = true && (is_set_task_metric_list());
- builder.append(present_task_metric_list);
- if (present_task_metric_list)
- builder.append(task_metric_list);
-
- boolean present_worker_metric_list = true && (is_set_worker_metric_list());
- builder.append(present_worker_metric_list);
- if (present_worker_metric_list)
- builder.append(worker_metric_list);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TopologyMetricInfo other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TopologyMetricInfo typedOther = (TopologyMetricInfo)other;
-
- lastComparison = Boolean.valueOf(is_set_topology_id()).compareTo(typedOther.is_set_topology_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_topology_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.topology_id, typedOther.topology_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_task_metric_list()).compareTo(typedOther.is_set_task_metric_list());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_task_metric_list()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.task_metric_list, typedOther.task_metric_list);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_worker_metric_list()).compareTo(typedOther.is_set_worker_metric_list());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_worker_metric_list()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.worker_metric_list, typedOther.worker_metric_list);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // TOPOLOGY_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.topology_id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // TASK_METRIC_LIST
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list259 = iprot.readListBegin();
- this.task_metric_list = new ArrayList<TaskMetricData>(_list259.size);
- for (int _i260 = 0; _i260 < _list259.size; ++_i260)
- {
- TaskMetricData _elem261; // required
- _elem261 = new TaskMetricData();
- _elem261.read(iprot);
- this.task_metric_list.add(_elem261);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // WORKER_METRIC_LIST
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list262 = iprot.readListBegin();
- this.worker_metric_list = new ArrayList<WorkerMetricData>(_list262.size);
- for (int _i263 = 0; _i263 < _list262.size; ++_i263)
- {
- WorkerMetricData _elem264; // required
- _elem264 = new WorkerMetricData();
- _elem264.read(iprot);
- this.worker_metric_list.add(_elem264);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.topology_id != null) {
- oprot.writeFieldBegin(TOPOLOGY_ID_FIELD_DESC);
- oprot.writeString(this.topology_id);
- oprot.writeFieldEnd();
- }
- if (this.task_metric_list != null) {
- if (is_set_task_metric_list()) {
- oprot.writeFieldBegin(TASK_METRIC_LIST_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.task_metric_list.size()));
- for (TaskMetricData _iter265 : this.task_metric_list)
- {
- _iter265.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- }
- if (this.worker_metric_list != null) {
- if (is_set_worker_metric_list()) {
- oprot.writeFieldBegin(WORKER_METRIC_LIST_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.worker_metric_list.size()));
- for (WorkerMetricData _iter266 : this.worker_metric_list)
- {
- _iter266.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TopologyMetricInfo(");
- boolean first = true;
-
- sb.append("topology_id:");
- if (this.topology_id == null) {
- sb.append("null");
- } else {
- sb.append(this.topology_id);
- }
- first = false;
- if (is_set_task_metric_list()) {
- if (!first) sb.append(", ");
- sb.append("task_metric_list:");
- if (this.task_metric_list == null) {
- sb.append("null");
- } else {
- sb.append(this.task_metric_list);
- }
- first = false;
- }
- if (is_set_worker_metric_list()) {
- if (!first) sb.append(", ");
- sb.append("worker_metric_list:");
- if (this.worker_metric_list == null) {
- sb.append("null");
- } else {
- sb.append(this.worker_metric_list);
- }
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_topology_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'topology_id' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/ComponentSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/ComponentSummary.java b/jstorm-core/src/main/java/backtype/storm/generated/ComponentSummary.java
new file mode 100644
index 0000000..8161f72
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/ComponentSummary.java
@@ -0,0 +1,903 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class ComponentSummary implements org.apache.thrift.TBase<ComponentSummary, ComponentSummary._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentSummary> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentSummary");
+
+ private static final org.apache.thrift.protocol.TField NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("name", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField PARALLEL_FIELD_DESC = new org.apache.thrift.protocol.TField("parallel", org.apache.thrift.protocol.TType.I32, (short)2);
+ private static final org.apache.thrift.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("type", org.apache.thrift.protocol.TType.STRING, (short)3);
+ private static final org.apache.thrift.protocol.TField TASK_IDS_FIELD_DESC = new org.apache.thrift.protocol.TField("task_ids", org.apache.thrift.protocol.TType.LIST, (short)4);
+ private static final org.apache.thrift.protocol.TField ERRORS_FIELD_DESC = new org.apache.thrift.protocol.TField("errors", org.apache.thrift.protocol.TType.LIST, (short)5);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new ComponentSummaryStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new ComponentSummaryTupleSchemeFactory());
+ }
+
+ private String name; // required
+ private int parallel; // required
+ private String type; // required
+ private List<Integer> task_ids; // required
+ private List<ErrorInfo> errors; // optional
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ NAME((short)1, "name"),
+ PARALLEL((short)2, "parallel"),
+ TYPE((short)3, "type"),
+ TASK_IDS((short)4, "task_ids"),
+ ERRORS((short)5, "errors");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // NAME
+ return NAME;
+ case 2: // PARALLEL
+ return PARALLEL;
+ case 3: // TYPE
+ return TYPE;
+ case 4: // TASK_IDS
+ return TASK_IDS;
+ case 5: // ERRORS
+ return ERRORS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __PARALLEL_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ private static final _Fields optionals[] = {_Fields.ERRORS};
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.NAME, new org.apache.thrift.meta_data.FieldMetaData("name", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.PARALLEL, new org.apache.thrift.meta_data.FieldMetaData("parallel", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ tmpMap.put(_Fields.TYPE, new org.apache.thrift.meta_data.FieldMetaData("type", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.TASK_IDS, new org.apache.thrift.meta_data.FieldMetaData("task_ids", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32))));
+ tmpMap.put(_Fields.ERRORS, new org.apache.thrift.meta_data.FieldMetaData("errors", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ErrorInfo.class))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ComponentSummary.class, metaDataMap);
+ }
+
+ public ComponentSummary() {
+ }
+
+ public ComponentSummary(
+ String name,
+ int parallel,
+ String type,
+ List<Integer> task_ids)
+ {
+ this();
+ this.name = name;
+ this.parallel = parallel;
+ set_parallel_isSet(true);
+ this.type = type;
+ this.task_ids = task_ids;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public ComponentSummary(ComponentSummary other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.is_set_name()) {
+ this.name = other.name;
+ }
+ this.parallel = other.parallel;
+ if (other.is_set_type()) {
+ this.type = other.type;
+ }
+ if (other.is_set_task_ids()) {
+ List<Integer> __this__task_ids = new ArrayList<Integer>(other.task_ids);
+ this.task_ids = __this__task_ids;
+ }
+ if (other.is_set_errors()) {
+ List<ErrorInfo> __this__errors = new ArrayList<ErrorInfo>(other.errors.size());
+ for (ErrorInfo other_element : other.errors) {
+ __this__errors.add(new ErrorInfo(other_element));
+ }
+ this.errors = __this__errors;
+ }
+ }
+
+ public ComponentSummary deepCopy() {
+ return new ComponentSummary(this);
+ }
+
+ @Override
+ public void clear() {
+ this.name = null;
+ set_parallel_isSet(false);
+ this.parallel = 0;
+ this.type = null;
+ this.task_ids = null;
+ this.errors = null;
+ }
+
+ public String get_name() {
+ return this.name;
+ }
+
+ public void set_name(String name) {
+ this.name = name;
+ }
+
+ public void unset_name() {
+ this.name = null;
+ }
+
+ /** Returns true if field name is set (has been assigned a value) and false otherwise */
+ public boolean is_set_name() {
+ return this.name != null;
+ }
+
+ public void set_name_isSet(boolean value) {
+ if (!value) {
+ this.name = null;
+ }
+ }
+
+ public int get_parallel() {
+ return this.parallel;
+ }
+
+ public void set_parallel(int parallel) {
+ this.parallel = parallel;
+ set_parallel_isSet(true);
+ }
+
+ public void unset_parallel() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __PARALLEL_ISSET_ID);
+ }
+
+ /** Returns true if field parallel is set (has been assigned a value) and false otherwise */
+ public boolean is_set_parallel() {
+ return EncodingUtils.testBit(__isset_bitfield, __PARALLEL_ISSET_ID);
+ }
+
+ public void set_parallel_isSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PARALLEL_ISSET_ID, value);
+ }
+
+ public String get_type() {
+ return this.type;
+ }
+
+ public void set_type(String type) {
+ this.type = type;
+ }
+
+ public void unset_type() {
+ this.type = null;
+ }
+
+ /** Returns true if field type is set (has been assigned a value) and false otherwise */
+ public boolean is_set_type() {
+ return this.type != null;
+ }
+
+ public void set_type_isSet(boolean value) {
+ if (!value) {
+ this.type = null;
+ }
+ }
+
+ public int get_task_ids_size() {
+ return (this.task_ids == null) ? 0 : this.task_ids.size();
+ }
+
+ public java.util.Iterator<Integer> get_task_ids_iterator() {
+ return (this.task_ids == null) ? null : this.task_ids.iterator();
+ }
+
+ public void add_to_task_ids(int elem) {
+ if (this.task_ids == null) {
+ this.task_ids = new ArrayList<Integer>();
+ }
+ this.task_ids.add(elem);
+ }
+
+ public List<Integer> get_task_ids() {
+ return this.task_ids;
+ }
+
+ public void set_task_ids(List<Integer> task_ids) {
+ this.task_ids = task_ids;
+ }
+
+ public void unset_task_ids() {
+ this.task_ids = null;
+ }
+
+ /** Returns true if field task_ids is set (has been assigned a value) and false otherwise */
+ public boolean is_set_task_ids() {
+ return this.task_ids != null;
+ }
+
+ public void set_task_ids_isSet(boolean value) {
+ if (!value) {
+ this.task_ids = null;
+ }
+ }
+
+ public int get_errors_size() {
+ return (this.errors == null) ? 0 : this.errors.size();
+ }
+
+ public java.util.Iterator<ErrorInfo> get_errors_iterator() {
+ return (this.errors == null) ? null : this.errors.iterator();
+ }
+
+ public void add_to_errors(ErrorInfo elem) {
+ if (this.errors == null) {
+ this.errors = new ArrayList<ErrorInfo>();
+ }
+ this.errors.add(elem);
+ }
+
+ public List<ErrorInfo> get_errors() {
+ return this.errors;
+ }
+
+ public void set_errors(List<ErrorInfo> errors) {
+ this.errors = errors;
+ }
+
+ public void unset_errors() {
+ this.errors = null;
+ }
+
+ /** Returns true if field errors is set (has been assigned a value) and false otherwise */
+ public boolean is_set_errors() {
+ return this.errors != null;
+ }
+
+ public void set_errors_isSet(boolean value) {
+ if (!value) {
+ this.errors = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case NAME:
+ if (value == null) {
+ unset_name();
+ } else {
+ set_name((String)value);
+ }
+ break;
+
+ case PARALLEL:
+ if (value == null) {
+ unset_parallel();
+ } else {
+ set_parallel((Integer)value);
+ }
+ break;
+
+ case TYPE:
+ if (value == null) {
+ unset_type();
+ } else {
+ set_type((String)value);
+ }
+ break;
+
+ case TASK_IDS:
+ if (value == null) {
+ unset_task_ids();
+ } else {
+ set_task_ids((List<Integer>)value);
+ }
+ break;
+
+ case ERRORS:
+ if (value == null) {
+ unset_errors();
+ } else {
+ set_errors((List<ErrorInfo>)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case NAME:
+ return get_name();
+
+ case PARALLEL:
+ return Integer.valueOf(get_parallel());
+
+ case TYPE:
+ return get_type();
+
+ case TASK_IDS:
+ return get_task_ids();
+
+ case ERRORS:
+ return get_errors();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case NAME:
+ return is_set_name();
+ case PARALLEL:
+ return is_set_parallel();
+ case TYPE:
+ return is_set_type();
+ case TASK_IDS:
+ return is_set_task_ids();
+ case ERRORS:
+ return is_set_errors();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof ComponentSummary)
+ return this.equals((ComponentSummary)that);
+ return false;
+ }
+
+ public boolean equals(ComponentSummary that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_name = true && this.is_set_name();
+ boolean that_present_name = true && that.is_set_name();
+ if (this_present_name || that_present_name) {
+ if (!(this_present_name && that_present_name))
+ return false;
+ if (!this.name.equals(that.name))
+ return false;
+ }
+
+ boolean this_present_parallel = true;
+ boolean that_present_parallel = true;
+ if (this_present_parallel || that_present_parallel) {
+ if (!(this_present_parallel && that_present_parallel))
+ return false;
+ if (this.parallel != that.parallel)
+ return false;
+ }
+
+ boolean this_present_type = true && this.is_set_type();
+ boolean that_present_type = true && that.is_set_type();
+ if (this_present_type || that_present_type) {
+ if (!(this_present_type && that_present_type))
+ return false;
+ if (!this.type.equals(that.type))
+ return false;
+ }
+
+ boolean this_present_task_ids = true && this.is_set_task_ids();
+ boolean that_present_task_ids = true && that.is_set_task_ids();
+ if (this_present_task_ids || that_present_task_ids) {
+ if (!(this_present_task_ids && that_present_task_ids))
+ return false;
+ if (!this.task_ids.equals(that.task_ids))
+ return false;
+ }
+
+ boolean this_present_errors = true && this.is_set_errors();
+ boolean that_present_errors = true && that.is_set_errors();
+ if (this_present_errors || that_present_errors) {
+ if (!(this_present_errors && that_present_errors))
+ return false;
+ if (!this.errors.equals(that.errors))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_name = true && (is_set_name());
+ list.add(present_name);
+ if (present_name)
+ list.add(name);
+
+ boolean present_parallel = true;
+ list.add(present_parallel);
+ if (present_parallel)
+ list.add(parallel);
+
+ boolean present_type = true && (is_set_type());
+ list.add(present_type);
+ if (present_type)
+ list.add(type);
+
+ boolean present_task_ids = true && (is_set_task_ids());
+ list.add(present_task_ids);
+ if (present_task_ids)
+ list.add(task_ids);
+
+ boolean present_errors = true && (is_set_errors());
+ list.add(present_errors);
+ if (present_errors)
+ list.add(errors);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(ComponentSummary other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_name()).compareTo(other.is_set_name());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_name()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.name, other.name);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_parallel()).compareTo(other.is_set_parallel());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_parallel()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.parallel, other.parallel);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_type()).compareTo(other.is_set_type());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_type()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.type, other.type);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_task_ids()).compareTo(other.is_set_task_ids());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_task_ids()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.task_ids, other.task_ids);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_errors()).compareTo(other.is_set_errors());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_errors()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.errors, other.errors);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ComponentSummary(");
+ boolean first = true;
+
+ sb.append("name:");
+ if (this.name == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.name);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("parallel:");
+ sb.append(this.parallel);
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("type:");
+ if (this.type == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.type);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("task_ids:");
+ if (this.task_ids == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.task_ids);
+ }
+ first = false;
+ if (is_set_errors()) {
+ if (!first) sb.append(", ");
+ sb.append("errors:");
+ if (this.errors == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.errors);
+ }
+ first = false;
+ }
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_name()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'name' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_parallel()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'parallel' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_type()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'type' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_task_ids()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'task_ids' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class ComponentSummaryStandardSchemeFactory implements SchemeFactory {
+ public ComponentSummaryStandardScheme getScheme() {
+ return new ComponentSummaryStandardScheme();
+ }
+ }
+
+ private static class ComponentSummaryStandardScheme extends StandardScheme<ComponentSummary> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, ComponentSummary struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // NAME
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.name = iprot.readString();
+ struct.set_name_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // PARALLEL
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.parallel = iprot.readI32();
+ struct.set_parallel_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 3: // TYPE
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.type = iprot.readString();
+ struct.set_type_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 4: // TASK_IDS
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list214 = iprot.readListBegin();
+ struct.task_ids = new ArrayList<Integer>(_list214.size);
+ int _elem215;
+ for (int _i216 = 0; _i216 < _list214.size; ++_i216)
+ {
+ _elem215 = iprot.readI32();
+ struct.task_ids.add(_elem215);
+ }
+ iprot.readListEnd();
+ }
+ struct.set_task_ids_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 5: // ERRORS
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list217 = iprot.readListBegin();
+ struct.errors = new ArrayList<ErrorInfo>(_list217.size);
+ ErrorInfo _elem218;
+ for (int _i219 = 0; _i219 < _list217.size; ++_i219)
+ {
+ _elem218 = new ErrorInfo();
+ _elem218.read(iprot);
+ struct.errors.add(_elem218);
+ }
+ iprot.readListEnd();
+ }
+ struct.set_errors_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, ComponentSummary struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.name != null) {
+ oprot.writeFieldBegin(NAME_FIELD_DESC);
+ oprot.writeString(struct.name);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldBegin(PARALLEL_FIELD_DESC);
+ oprot.writeI32(struct.parallel);
+ oprot.writeFieldEnd();
+ if (struct.type != null) {
+ oprot.writeFieldBegin(TYPE_FIELD_DESC);
+ oprot.writeString(struct.type);
+ oprot.writeFieldEnd();
+ }
+ if (struct.task_ids != null) {
+ oprot.writeFieldBegin(TASK_IDS_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, struct.task_ids.size()));
+ for (int _iter220 : struct.task_ids)
+ {
+ oprot.writeI32(_iter220);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ if (struct.errors != null) {
+ if (struct.is_set_errors()) {
+ oprot.writeFieldBegin(ERRORS_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.errors.size()));
+ for (ErrorInfo _iter221 : struct.errors)
+ {
+ _iter221.write(oprot);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class ComponentSummaryTupleSchemeFactory implements SchemeFactory {
+ public ComponentSummaryTupleScheme getScheme() {
+ return new ComponentSummaryTupleScheme();
+ }
+ }
+
+ private static class ComponentSummaryTupleScheme extends TupleScheme<ComponentSummary> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, ComponentSummary struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.name);
+ oprot.writeI32(struct.parallel);
+ oprot.writeString(struct.type);
+ {
+ oprot.writeI32(struct.task_ids.size());
+ for (int _iter222 : struct.task_ids)
+ {
+ oprot.writeI32(_iter222);
+ }
+ }
+ BitSet optionals = new BitSet();
+ if (struct.is_set_errors()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.is_set_errors()) {
+ {
+ oprot.writeI32(struct.errors.size());
+ for (ErrorInfo _iter223 : struct.errors)
+ {
+ _iter223.write(oprot);
+ }
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, ComponentSummary struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.name = iprot.readString();
+ struct.set_name_isSet(true);
+ struct.parallel = iprot.readI32();
+ struct.set_parallel_isSet(true);
+ struct.type = iprot.readString();
+ struct.set_type_isSet(true);
+ {
+ org.apache.thrift.protocol.TList _list224 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I32, iprot.readI32());
+ struct.task_ids = new ArrayList<Integer>(_list224.size);
+ int _elem225;
+ for (int _i226 = 0; _i226 < _list224.size; ++_i226)
+ {
+ _elem225 = iprot.readI32();
+ struct.task_ids.add(_elem225);
+ }
+ }
+ struct.set_task_ids_isSet(true);
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ {
+ org.apache.thrift.protocol.TList _list227 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.errors = new ArrayList<ErrorInfo>(_list227.size);
+ ErrorInfo _elem228;
+ for (int _i229 = 0; _i229 < _list227.size; ++_i229)
+ {
+ _elem228 = new ErrorInfo();
+ _elem228.read(iprot);
+ struct.errors.add(_elem228);
+ }
+ }
+ struct.set_errors_isSet(true);
+ }
+ }
+ }
+
+}
+
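A minimal usage sketch for the generated ComponentSummary (not part of the commit; the bolt name "word-count" and the task ids are hypothetical). It only calls methods defined in the class above: the four-argument constructor covers the required fields, and validate() rejects a struct that is missing any of them.

    import java.util.Arrays;
    import backtype.storm.generated.ComponentSummary;

    public class ComponentSummaryExample {
        public static void main(String[] args) throws Exception {
            // Hypothetical bolt "word-count" running 2 parallel tasks with ids 3 and 4.
            ComponentSummary summary =
                new ComponentSummary("word-count", 2, "bolt", Arrays.asList(3, 4));
            // validate() throws TProtocolException if any required field
            // (name, parallel, type, task_ids) has not been assigned.
            summary.validate();
            // The optional 'errors' list only appears in toString() once it is set.
            System.out.println(summary);
        }
    }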
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/Credentials.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/Credentials.java b/jstorm-core/src/main/java/backtype/storm/generated/Credentials.java
new file mode 100644
index 0000000..e2ca92d
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/Credentials.java
@@ -0,0 +1,441 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class Credentials implements org.apache.thrift.TBase<Credentials, Credentials._Fields>, java.io.Serializable, Cloneable, Comparable<Credentials> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Credentials");
+
+ private static final org.apache.thrift.protocol.TField CREDS_FIELD_DESC = new org.apache.thrift.protocol.TField("creds", org.apache.thrift.protocol.TType.MAP, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new CredentialsStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new CredentialsTupleSchemeFactory());
+ }
+
+ private Map<String,String> creds; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ CREDS((short)1, "creds");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // CREDS
+ return CREDS;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.CREDS, new org.apache.thrift.meta_data.FieldMetaData("creds", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Credentials.class, metaDataMap);
+ }
+
+ public Credentials() {
+ }
+
+ public Credentials(
+ Map<String,String> creds)
+ {
+ this();
+ this.creds = creds;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public Credentials(Credentials other) {
+ if (other.is_set_creds()) {
+ Map<String,String> __this__creds = new HashMap<String,String>(other.creds);
+ this.creds = __this__creds;
+ }
+ }
+
+ public Credentials deepCopy() {
+ return new Credentials(this);
+ }
+
+ @Override
+ public void clear() {
+ this.creds = null;
+ }
+
+ public int get_creds_size() {
+ return (this.creds == null) ? 0 : this.creds.size();
+ }
+
+ public void put_to_creds(String key, String val) {
+ if (this.creds == null) {
+ this.creds = new HashMap<String,String>();
+ }
+ this.creds.put(key, val);
+ }
+
+ public Map<String,String> get_creds() {
+ return this.creds;
+ }
+
+ public void set_creds(Map<String,String> creds) {
+ this.creds = creds;
+ }
+
+ public void unset_creds() {
+ this.creds = null;
+ }
+
+ /** Returns true if field creds is set (has been assigned a value) and false otherwise */
+ public boolean is_set_creds() {
+ return this.creds != null;
+ }
+
+ public void set_creds_isSet(boolean value) {
+ if (!value) {
+ this.creds = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case CREDS:
+ if (value == null) {
+ unset_creds();
+ } else {
+ set_creds((Map<String,String>)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case CREDS:
+ return get_creds();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case CREDS:
+ return is_set_creds();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof Credentials)
+ return this.equals((Credentials)that);
+ return false;
+ }
+
+ public boolean equals(Credentials that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_creds = true && this.is_set_creds();
+ boolean that_present_creds = true && that.is_set_creds();
+ if (this_present_creds || that_present_creds) {
+ if (!(this_present_creds && that_present_creds))
+ return false;
+ if (!this.creds.equals(that.creds))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_creds = true && (is_set_creds());
+ list.add(present_creds);
+ if (present_creds)
+ list.add(creds);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(Credentials other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_creds()).compareTo(other.is_set_creds());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_creds()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.creds, other.creds);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("Credentials(");
+ boolean first = true;
+
+ sb.append("creds:");
+ if (this.creds == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.creds);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_creds()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'creds' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class CredentialsStandardSchemeFactory implements SchemeFactory {
+ public CredentialsStandardScheme getScheme() {
+ return new CredentialsStandardScheme();
+ }
+ }
+
+ private static class CredentialsStandardScheme extends StandardScheme<Credentials> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, Credentials struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // CREDS
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map254 = iprot.readMapBegin();
+ struct.creds = new HashMap<String,String>(2*_map254.size);
+ String _key255;
+ String _val256;
+ for (int _i257 = 0; _i257 < _map254.size; ++_i257)
+ {
+ _key255 = iprot.readString();
+ _val256 = iprot.readString();
+ struct.creds.put(_key255, _val256);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_creds_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, Credentials struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.creds != null) {
+ oprot.writeFieldBegin(CREDS_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.creds.size()));
+ for (Map.Entry<String, String> _iter258 : struct.creds.entrySet())
+ {
+ oprot.writeString(_iter258.getKey());
+ oprot.writeString(_iter258.getValue());
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class CredentialsTupleSchemeFactory implements SchemeFactory {
+ public CredentialsTupleScheme getScheme() {
+ return new CredentialsTupleScheme();
+ }
+ }
+
+ private static class CredentialsTupleScheme extends TupleScheme<Credentials> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, Credentials struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ {
+ oprot.writeI32(struct.creds.size());
+ for (Map.Entry<String, String> _iter259 : struct.creds.entrySet())
+ {
+ oprot.writeString(_iter259.getKey());
+ oprot.writeString(_iter259.getValue());
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, Credentials struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ {
+ org.apache.thrift.protocol.TMap _map260 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.creds = new HashMap<String,String>(2*_map260.size);
+ String _key261;
+ String _val262;
+ for (int _i263 = 0; _i263 < _map260.size; ++_i263)
+ {
+ _key261 = iprot.readString();
+ _val262 = iprot.readString();
+ struct.creds.put(_key261, _val262);
+ }
+ }
+ struct.set_creds_isSet(true);
+ }
+ }
+
+}
+
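Likewise, a short sketch for the generated Credentials struct (hypothetical key and path; not part of the commit). The creds map is its single required field, and put_to_creds creates the backing HashMap on first use, so a struct populated entry by entry still passes validate().

    import backtype.storm.generated.Credentials;

    public class CredentialsExample {
        public static void main(String[] args) throws Exception {
            Credentials credentials = new Credentials();
            // put_to_creds lazily allocates the underlying HashMap.
            credentials.put_to_creds("storm.keytab.path", "/etc/security/storm.keytab");
            // validate() only checks that the 'creds' map has been assigned.
            credentials.validate();
            System.out.println(credentials.get_creds_size());   // prints 1
        }
    }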
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/DRPCExecutionException.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/DRPCExecutionException.java b/jstorm-core/src/main/java/backtype/storm/generated/DRPCExecutionException.java
new file mode 100644
index 0000000..3d8502f
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/DRPCExecutionException.java
@@ -0,0 +1,389 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class DRPCExecutionException extends TException implements org.apache.thrift.TBase<DRPCExecutionException, DRPCExecutionException._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCExecutionException> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DRPCExecutionException");
+
+ private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new DRPCExecutionExceptionStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new DRPCExecutionExceptionTupleSchemeFactory());
+ }
+
+ private String msg; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ MSG((short)1, "msg");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+     * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // MSG
+ return MSG;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+     * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DRPCExecutionException.class, metaDataMap);
+ }
+
+ public DRPCExecutionException() {
+ }
+
+ public DRPCExecutionException(
+ String msg)
+ {
+ this();
+ this.msg = msg;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public DRPCExecutionException(DRPCExecutionException other) {
+ if (other.is_set_msg()) {
+ this.msg = other.msg;
+ }
+ }
+
+ public DRPCExecutionException deepCopy() {
+ return new DRPCExecutionException(this);
+ }
+
+ @Override
+ public void clear() {
+ this.msg = null;
+ }
+
+ public String get_msg() {
+ return this.msg;
+ }
+
+ public void set_msg(String msg) {
+ this.msg = msg;
+ }
+
+ public void unset_msg() {
+ this.msg = null;
+ }
+
+ /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+ public boolean is_set_msg() {
+ return this.msg != null;
+ }
+
+ public void set_msg_isSet(boolean value) {
+ if (!value) {
+ this.msg = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case MSG:
+ if (value == null) {
+ unset_msg();
+ } else {
+ set_msg((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case MSG:
+ return get_msg();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case MSG:
+ return is_set_msg();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof DRPCExecutionException)
+ return this.equals((DRPCExecutionException)that);
+ return false;
+ }
+
+ public boolean equals(DRPCExecutionException that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_msg = true && this.is_set_msg();
+ boolean that_present_msg = true && that.is_set_msg();
+ if (this_present_msg || that_present_msg) {
+ if (!(this_present_msg && that_present_msg))
+ return false;
+ if (!this.msg.equals(that.msg))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_msg = true && (is_set_msg());
+ list.add(present_msg);
+ if (present_msg)
+ list.add(msg);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(DRPCExecutionException other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_msg()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("DRPCExecutionException(");
+ boolean first = true;
+
+ sb.append("msg:");
+ if (this.msg == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.msg);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_msg()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class DRPCExecutionExceptionStandardSchemeFactory implements SchemeFactory {
+ public DRPCExecutionExceptionStandardScheme getScheme() {
+ return new DRPCExecutionExceptionStandardScheme();
+ }
+ }
+
+ private static class DRPCExecutionExceptionStandardScheme extends StandardScheme<DRPCExecutionException> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, DRPCExecutionException struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // MSG
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, DRPCExecutionException struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.msg != null) {
+ oprot.writeFieldBegin(MSG_FIELD_DESC);
+ oprot.writeString(struct.msg);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class DRPCExecutionExceptionTupleSchemeFactory implements SchemeFactory {
+ public DRPCExecutionExceptionTupleScheme getScheme() {
+ return new DRPCExecutionExceptionTupleScheme();
+ }
+ }
+
+ private static class DRPCExecutionExceptionTupleScheme extends TupleScheme<DRPCExecutionException> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, DRPCExecutionException struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.msg);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, DRPCExecutionException struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ }
+ }
+
+}
+
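The generated class above is a plain Thrift struct with a single required msg field, and validate() is what enforces that requirement during read and write. A minimal usage sketch follows; it assumes only the generated backtype.storm.generated classes and the Thrift 0.9.2 runtime (libthrift) on the classpath, and the demo class name and message text are illustrative, not part of this commit.

    import backtype.storm.generated.DRPCExecutionException;

    public class DRPCExecutionExceptionDemo {  // hypothetical demo class, not in the repo
        public static void main(String[] args) throws Exception {
            // A default-constructed instance has no msg, so the required-field
            // check in validate() throws a TProtocolException.
            DRPCExecutionException empty = new DRPCExecutionException();
            try {
                empty.validate();
            } catch (org.apache.thrift.protocol.TProtocolException e) {
                System.out.println("rejected: " + e.getMessage());
            }

            // Once msg is set, validate() passes and the accessors behave as expected.
            DRPCExecutionException ex = new DRPCExecutionException("topology not found");
            ex.validate();
            System.out.println(ex.is_set_msg() + " / " + ex.get_msg());

            // deepCopy() yields an equal but independent instance.
            DRPCExecutionException copy = ex.deepCopy();
            System.out.println(ex.equals(copy) + " / " + (ex.compareTo(copy) == 0));
        }
    }
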
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/DRPCRequest.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/DRPCRequest.java b/jstorm-core/src/main/java/backtype/storm/generated/DRPCRequest.java
new file mode 100644
index 0000000..00448f5
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/DRPCRequest.java
@@ -0,0 +1,490 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class DRPCRequest implements org.apache.thrift.TBase<DRPCRequest, DRPCRequest._Fields>, java.io.Serializable, Cloneable, Comparable<DRPCRequest> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("DRPCRequest");
+
+ private static final org.apache.thrift.protocol.TField FUNC_ARGS_FIELD_DESC = new org.apache.thrift.protocol.TField("func_args", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField REQUEST_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("request_id", org.apache.thrift.protocol.TType.STRING, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new DRPCRequestStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new DRPCRequestTupleSchemeFactory());
+ }
+
+ private String func_args; // required
+ private String request_id; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ FUNC_ARGS((short)1, "func_args"),
+ REQUEST_ID((short)2, "request_id");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // FUNC_ARGS
+ return FUNC_ARGS;
+ case 2: // REQUEST_ID
+ return REQUEST_ID;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.FUNC_ARGS, new org.apache.thrift.meta_data.FieldMetaData("func_args", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ tmpMap.put(_Fields.REQUEST_ID, new org.apache.thrift.meta_data.FieldMetaData("request_id", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(DRPCRequest.class, metaDataMap);
+ }
+
+ public DRPCRequest() {
+ }
+
+ public DRPCRequest(
+ String func_args,
+ String request_id)
+ {
+ this();
+ this.func_args = func_args;
+ this.request_id = request_id;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public DRPCRequest(DRPCRequest other) {
+ if (other.is_set_func_args()) {
+ this.func_args = other.func_args;
+ }
+ if (other.is_set_request_id()) {
+ this.request_id = other.request_id;
+ }
+ }
+
+ public DRPCRequest deepCopy() {
+ return new DRPCRequest(this);
+ }
+
+ @Override
+ public void clear() {
+ this.func_args = null;
+ this.request_id = null;
+ }
+
+ public String get_func_args() {
+ return this.func_args;
+ }
+
+ public void set_func_args(String func_args) {
+ this.func_args = func_args;
+ }
+
+ public void unset_func_args() {
+ this.func_args = null;
+ }
+
+ /** Returns true if field func_args is set (has been assigned a value) and false otherwise */
+ public boolean is_set_func_args() {
+ return this.func_args != null;
+ }
+
+ public void set_func_args_isSet(boolean value) {
+ if (!value) {
+ this.func_args = null;
+ }
+ }
+
+ public String get_request_id() {
+ return this.request_id;
+ }
+
+ public void set_request_id(String request_id) {
+ this.request_id = request_id;
+ }
+
+ public void unset_request_id() {
+ this.request_id = null;
+ }
+
+ /** Returns true if field request_id is set (has been assigned a value) and false otherwise */
+ public boolean is_set_request_id() {
+ return this.request_id != null;
+ }
+
+ public void set_request_id_isSet(boolean value) {
+ if (!value) {
+ this.request_id = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case FUNC_ARGS:
+ if (value == null) {
+ unset_func_args();
+ } else {
+ set_func_args((String)value);
+ }
+ break;
+
+ case REQUEST_ID:
+ if (value == null) {
+ unset_request_id();
+ } else {
+ set_request_id((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case FUNC_ARGS:
+ return get_func_args();
+
+ case REQUEST_ID:
+ return get_request_id();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case FUNC_ARGS:
+ return is_set_func_args();
+ case REQUEST_ID:
+ return is_set_request_id();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof DRPCRequest)
+ return this.equals((DRPCRequest)that);
+ return false;
+ }
+
+ public boolean equals(DRPCRequest that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_func_args = true && this.is_set_func_args();
+ boolean that_present_func_args = true && that.is_set_func_args();
+ if (this_present_func_args || that_present_func_args) {
+ if (!(this_present_func_args && that_present_func_args))
+ return false;
+ if (!this.func_args.equals(that.func_args))
+ return false;
+ }
+
+ boolean this_present_request_id = true && this.is_set_request_id();
+ boolean that_present_request_id = true && that.is_set_request_id();
+ if (this_present_request_id || that_present_request_id) {
+ if (!(this_present_request_id && that_present_request_id))
+ return false;
+ if (!this.request_id.equals(that.request_id))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_func_args = true && (is_set_func_args());
+ list.add(present_func_args);
+ if (present_func_args)
+ list.add(func_args);
+
+ boolean present_request_id = true && (is_set_request_id());
+ list.add(present_request_id);
+ if (present_request_id)
+ list.add(request_id);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(DRPCRequest other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_func_args()).compareTo(other.is_set_func_args());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_func_args()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.func_args, other.func_args);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_request_id()).compareTo(other.is_set_request_id());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_request_id()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.request_id, other.request_id);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("DRPCRequest(");
+ boolean first = true;
+
+ sb.append("func_args:");
+ if (this.func_args == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.func_args);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("request_id:");
+ if (this.request_id == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.request_id);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_func_args()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'func_args' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_request_id()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'request_id' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class DRPCRequestStandardSchemeFactory implements SchemeFactory {
+ public DRPCRequestStandardScheme getScheme() {
+ return new DRPCRequestStandardScheme();
+ }
+ }
+
+ private static class DRPCRequestStandardScheme extends StandardScheme<DRPCRequest> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, DRPCRequest struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // FUNC_ARGS
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.func_args = iprot.readString();
+ struct.set_func_args_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // REQUEST_ID
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.request_id = iprot.readString();
+ struct.set_request_id_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, DRPCRequest struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.func_args != null) {
+ oprot.writeFieldBegin(FUNC_ARGS_FIELD_DESC);
+ oprot.writeString(struct.func_args);
+ oprot.writeFieldEnd();
+ }
+ if (struct.request_id != null) {
+ oprot.writeFieldBegin(REQUEST_ID_FIELD_DESC);
+ oprot.writeString(struct.request_id);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class DRPCRequestTupleSchemeFactory implements SchemeFactory {
+ public DRPCRequestTupleScheme getScheme() {
+ return new DRPCRequestTupleScheme();
+ }
+ }
+
+ private static class DRPCRequestTupleScheme extends TupleScheme<DRPCRequest> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, DRPCRequest struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.func_args);
+ oprot.writeString(struct.request_id);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, DRPCRequest struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.func_args = iprot.readString();
+ struct.set_func_args_isSet(true);
+ struct.request_id = iprot.readString();
+ struct.set_request_id_isSet(true);
+ }
+ }
+
+}
+
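DRPCRequest round-trips through the standard scheme in the usual way. A short sketch, again assuming the generated classes plus the Thrift 0.9.2 runtime on the classpath; TSerializer and TDeserializer are stock libthrift helpers, and the demo class name and sample values are illustrative only.

    import backtype.storm.generated.DRPCRequest;
    import org.apache.thrift.TDeserializer;
    import org.apache.thrift.TSerializer;
    import org.apache.thrift.protocol.TBinaryProtocol;

    public class DRPCRequestRoundTrip {  // hypothetical demo class, not in the repo
        public static void main(String[] args) throws Exception {
            // Both fields are required, so set them before serializing.
            DRPCRequest original = new DRPCRequest("{\"word\":\"hello\"}", "request-0001");

            // TBinaryProtocol selects DRPCRequestStandardScheme for write().
            TSerializer serializer = new TSerializer(new TBinaryProtocol.Factory());
            byte[] bytes = serializer.serialize(original);

            // read() rebuilds the struct and re-runs validate() on the result.
            DRPCRequest decoded = new DRPCRequest();
            new TDeserializer(new TBinaryProtocol.Factory()).deserialize(decoded, bytes);

            System.out.println(decoded.get_func_args() + " / " + decoded.get_request_id());
            System.out.println("round-trip equal: " + original.equals(decoded));
        }
    }
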
[32/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TaskStats.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TaskStats.java b/jstorm-client/src/main/java/backtype/storm/generated/TaskStats.java
deleted file mode 100644
index da38eb0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TaskStats.java
+++ /dev/null
@@ -1,1285 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TaskStats implements org.apache.thrift7.TBase<TaskStats, TaskStats._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TaskStats");
-
- private static final org.apache.thrift7.protocol.TField EMITTED_FIELD_DESC = new org.apache.thrift7.protocol.TField("emitted", org.apache.thrift7.protocol.TType.MAP, (short)1);
- private static final org.apache.thrift7.protocol.TField SEND_TPS_FIELD_DESC = new org.apache.thrift7.protocol.TField("send_tps", org.apache.thrift7.protocol.TType.MAP, (short)2);
- private static final org.apache.thrift7.protocol.TField RECV_TPS_FIELD_DESC = new org.apache.thrift7.protocol.TField("recv_tps", org.apache.thrift7.protocol.TType.MAP, (short)3);
- private static final org.apache.thrift7.protocol.TField ACKED_FIELD_DESC = new org.apache.thrift7.protocol.TField("acked", org.apache.thrift7.protocol.TType.MAP, (short)4);
- private static final org.apache.thrift7.protocol.TField FAILED_FIELD_DESC = new org.apache.thrift7.protocol.TField("failed", org.apache.thrift7.protocol.TType.MAP, (short)5);
- private static final org.apache.thrift7.protocol.TField PROCESS_MS_AVG_FIELD_DESC = new org.apache.thrift7.protocol.TField("process_ms_avg", org.apache.thrift7.protocol.TType.MAP, (short)6);
-
- private Map<String,Map<String,Long>> emitted; // required
- private Map<String,Map<String,Double>> send_tps; // required
- private Map<String,Map<GlobalStreamId,Double>> recv_tps; // required
- private Map<String,Map<GlobalStreamId,Long>> acked; // required
- private Map<String,Map<GlobalStreamId,Long>> failed; // required
- private Map<String,Map<GlobalStreamId,Double>> process_ms_avg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- EMITTED((short)1, "emitted"),
- SEND_TPS((short)2, "send_tps"),
- RECV_TPS((short)3, "recv_tps"),
- ACKED((short)4, "acked"),
- FAILED((short)5, "failed"),
- PROCESS_MS_AVG((short)6, "process_ms_avg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // EMITTED
- return EMITTED;
- case 2: // SEND_TPS
- return SEND_TPS;
- case 3: // RECV_TPS
- return RECV_TPS;
- case 4: // ACKED
- return ACKED;
- case 5: // FAILED
- return FAILED;
- case 6: // PROCESS_MS_AVG
- return PROCESS_MS_AVG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.EMITTED, new org.apache.thrift7.meta_data.FieldMetaData("emitted", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.SEND_TPS, new org.apache.thrift7.meta_data.FieldMetaData("send_tps", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)))));
- tmpMap.put(_Fields.RECV_TPS, new org.apache.thrift7.meta_data.FieldMetaData("recv_tps", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)))));
- tmpMap.put(_Fields.ACKED, new org.apache.thrift7.meta_data.FieldMetaData("acked", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.FAILED, new org.apache.thrift7.meta_data.FieldMetaData("failed", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.PROCESS_MS_AVG, new org.apache.thrift7.meta_data.FieldMetaData("process_ms_avg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TaskStats.class, metaDataMap);
- }
-
- public TaskStats() {
- }
-
- public TaskStats(
- Map<String,Map<String,Long>> emitted,
- Map<String,Map<String,Double>> send_tps,
- Map<String,Map<GlobalStreamId,Double>> recv_tps,
- Map<String,Map<GlobalStreamId,Long>> acked,
- Map<String,Map<GlobalStreamId,Long>> failed,
- Map<String,Map<GlobalStreamId,Double>> process_ms_avg)
- {
- this();
- this.emitted = emitted;
- this.send_tps = send_tps;
- this.recv_tps = recv_tps;
- this.acked = acked;
- this.failed = failed;
- this.process_ms_avg = process_ms_avg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TaskStats(TaskStats other) {
- if (other.is_set_emitted()) {
- Map<String,Map<String,Long>> __this__emitted = new HashMap<String,Map<String,Long>>();
- for (Map.Entry<String, Map<String,Long>> other_element : other.emitted.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<String,Long> other_element_value = other_element.getValue();
-
- String __this__emitted_copy_key = other_element_key;
-
- Map<String,Long> __this__emitted_copy_value = new HashMap<String,Long>();
- for (Map.Entry<String, Long> other_element_value_element : other_element_value.entrySet()) {
-
- String other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- String __this__emitted_copy_value_copy_key = other_element_value_element_key;
-
- Long __this__emitted_copy_value_copy_value = other_element_value_element_value;
-
- __this__emitted_copy_value.put(__this__emitted_copy_value_copy_key, __this__emitted_copy_value_copy_value);
- }
-
- __this__emitted.put(__this__emitted_copy_key, __this__emitted_copy_value);
- }
- this.emitted = __this__emitted;
- }
- if (other.is_set_send_tps()) {
- Map<String,Map<String,Double>> __this__send_tps = new HashMap<String,Map<String,Double>>();
- for (Map.Entry<String, Map<String,Double>> other_element : other.send_tps.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<String,Double> other_element_value = other_element.getValue();
-
- String __this__send_tps_copy_key = other_element_key;
-
- Map<String,Double> __this__send_tps_copy_value = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element_value_element : other_element_value.entrySet()) {
-
- String other_element_value_element_key = other_element_value_element.getKey();
- Double other_element_value_element_value = other_element_value_element.getValue();
-
- String __this__send_tps_copy_value_copy_key = other_element_value_element_key;
-
- Double __this__send_tps_copy_value_copy_value = other_element_value_element_value;
-
- __this__send_tps_copy_value.put(__this__send_tps_copy_value_copy_key, __this__send_tps_copy_value_copy_value);
- }
-
- __this__send_tps.put(__this__send_tps_copy_key, __this__send_tps_copy_value);
- }
- this.send_tps = __this__send_tps;
- }
- if (other.is_set_recv_tps()) {
- Map<String,Map<GlobalStreamId,Double>> __this__recv_tps = new HashMap<String,Map<GlobalStreamId,Double>>();
- for (Map.Entry<String, Map<GlobalStreamId,Double>> other_element : other.recv_tps.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Double> other_element_value = other_element.getValue();
-
- String __this__recv_tps_copy_key = other_element_key;
-
- Map<GlobalStreamId,Double> __this__recv_tps_copy_value = new HashMap<GlobalStreamId,Double>();
- for (Map.Entry<GlobalStreamId, Double> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Double other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__recv_tps_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Double __this__recv_tps_copy_value_copy_value = other_element_value_element_value;
-
- __this__recv_tps_copy_value.put(__this__recv_tps_copy_value_copy_key, __this__recv_tps_copy_value_copy_value);
- }
-
- __this__recv_tps.put(__this__recv_tps_copy_key, __this__recv_tps_copy_value);
- }
- this.recv_tps = __this__recv_tps;
- }
- if (other.is_set_acked()) {
- Map<String,Map<GlobalStreamId,Long>> __this__acked = new HashMap<String,Map<GlobalStreamId,Long>>();
- for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.acked.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Long> other_element_value = other_element.getValue();
-
- String __this__acked_copy_key = other_element_key;
-
- Map<GlobalStreamId,Long> __this__acked_copy_value = new HashMap<GlobalStreamId,Long>();
- for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__acked_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Long __this__acked_copy_value_copy_value = other_element_value_element_value;
-
- __this__acked_copy_value.put(__this__acked_copy_value_copy_key, __this__acked_copy_value_copy_value);
- }
-
- __this__acked.put(__this__acked_copy_key, __this__acked_copy_value);
- }
- this.acked = __this__acked;
- }
- if (other.is_set_failed()) {
- Map<String,Map<GlobalStreamId,Long>> __this__failed = new HashMap<String,Map<GlobalStreamId,Long>>();
- for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.failed.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Long> other_element_value = other_element.getValue();
-
- String __this__failed_copy_key = other_element_key;
-
- Map<GlobalStreamId,Long> __this__failed_copy_value = new HashMap<GlobalStreamId,Long>();
- for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__failed_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Long __this__failed_copy_value_copy_value = other_element_value_element_value;
-
- __this__failed_copy_value.put(__this__failed_copy_value_copy_key, __this__failed_copy_value_copy_value);
- }
-
- __this__failed.put(__this__failed_copy_key, __this__failed_copy_value);
- }
- this.failed = __this__failed;
- }
- if (other.is_set_process_ms_avg()) {
- Map<String,Map<GlobalStreamId,Double>> __this__process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>();
- for (Map.Entry<String, Map<GlobalStreamId,Double>> other_element : other.process_ms_avg.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Double> other_element_value = other_element.getValue();
-
- String __this__process_ms_avg_copy_key = other_element_key;
-
- Map<GlobalStreamId,Double> __this__process_ms_avg_copy_value = new HashMap<GlobalStreamId,Double>();
- for (Map.Entry<GlobalStreamId, Double> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Double other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__process_ms_avg_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Double __this__process_ms_avg_copy_value_copy_value = other_element_value_element_value;
-
- __this__process_ms_avg_copy_value.put(__this__process_ms_avg_copy_value_copy_key, __this__process_ms_avg_copy_value_copy_value);
- }
-
- __this__process_ms_avg.put(__this__process_ms_avg_copy_key, __this__process_ms_avg_copy_value);
- }
- this.process_ms_avg = __this__process_ms_avg;
- }
- }
-
- public TaskStats deepCopy() {
- return new TaskStats(this);
- }
-
- @Override
- public void clear() {
- this.emitted = null;
- this.send_tps = null;
- this.recv_tps = null;
- this.acked = null;
- this.failed = null;
- this.process_ms_avg = null;
- }
-
- public int get_emitted_size() {
- return (this.emitted == null) ? 0 : this.emitted.size();
- }
-
- public void put_to_emitted(String key, Map<String,Long> val) {
- if (this.emitted == null) {
- this.emitted = new HashMap<String,Map<String,Long>>();
- }
- this.emitted.put(key, val);
- }
-
- public Map<String,Map<String,Long>> get_emitted() {
- return this.emitted;
- }
-
- public void set_emitted(Map<String,Map<String,Long>> emitted) {
- this.emitted = emitted;
- }
-
- public void unset_emitted() {
- this.emitted = null;
- }
-
- /** Returns true if field emitted is set (has been assigned a value) and false otherwise */
- public boolean is_set_emitted() {
- return this.emitted != null;
- }
-
- public void set_emitted_isSet(boolean value) {
- if (!value) {
- this.emitted = null;
- }
- }
-
- public int get_send_tps_size() {
- return (this.send_tps == null) ? 0 : this.send_tps.size();
- }
-
- public void put_to_send_tps(String key, Map<String,Double> val) {
- if (this.send_tps == null) {
- this.send_tps = new HashMap<String,Map<String,Double>>();
- }
- this.send_tps.put(key, val);
- }
-
- public Map<String,Map<String,Double>> get_send_tps() {
- return this.send_tps;
- }
-
- public void set_send_tps(Map<String,Map<String,Double>> send_tps) {
- this.send_tps = send_tps;
- }
-
- public void unset_send_tps() {
- this.send_tps = null;
- }
-
- /** Returns true if field send_tps is set (has been assigned a value) and false otherwise */
- public boolean is_set_send_tps() {
- return this.send_tps != null;
- }
-
- public void set_send_tps_isSet(boolean value) {
- if (!value) {
- this.send_tps = null;
- }
- }
-
- public int get_recv_tps_size() {
- return (this.recv_tps == null) ? 0 : this.recv_tps.size();
- }
-
- public void put_to_recv_tps(String key, Map<GlobalStreamId,Double> val) {
- if (this.recv_tps == null) {
- this.recv_tps = new HashMap<String,Map<GlobalStreamId,Double>>();
- }
- this.recv_tps.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Double>> get_recv_tps() {
- return this.recv_tps;
- }
-
- public void set_recv_tps(Map<String,Map<GlobalStreamId,Double>> recv_tps) {
- this.recv_tps = recv_tps;
- }
-
- public void unset_recv_tps() {
- this.recv_tps = null;
- }
-
- /** Returns true if field recv_tps is set (has been assigned a value) and false otherwise */
- public boolean is_set_recv_tps() {
- return this.recv_tps != null;
- }
-
- public void set_recv_tps_isSet(boolean value) {
- if (!value) {
- this.recv_tps = null;
- }
- }
-
- public int get_acked_size() {
- return (this.acked == null) ? 0 : this.acked.size();
- }
-
- public void put_to_acked(String key, Map<GlobalStreamId,Long> val) {
- if (this.acked == null) {
- this.acked = new HashMap<String,Map<GlobalStreamId,Long>>();
- }
- this.acked.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Long>> get_acked() {
- return this.acked;
- }
-
- public void set_acked(Map<String,Map<GlobalStreamId,Long>> acked) {
- this.acked = acked;
- }
-
- public void unset_acked() {
- this.acked = null;
- }
-
- /** Returns true if field acked is set (has been assigned a value) and false otherwise */
- public boolean is_set_acked() {
- return this.acked != null;
- }
-
- public void set_acked_isSet(boolean value) {
- if (!value) {
- this.acked = null;
- }
- }
-
- public int get_failed_size() {
- return (this.failed == null) ? 0 : this.failed.size();
- }
-
- public void put_to_failed(String key, Map<GlobalStreamId,Long> val) {
- if (this.failed == null) {
- this.failed = new HashMap<String,Map<GlobalStreamId,Long>>();
- }
- this.failed.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Long>> get_failed() {
- return this.failed;
- }
-
- public void set_failed(Map<String,Map<GlobalStreamId,Long>> failed) {
- this.failed = failed;
- }
-
- public void unset_failed() {
- this.failed = null;
- }
-
- /** Returns true if field failed is set (has been assigned a value) and false otherwise */
- public boolean is_set_failed() {
- return this.failed != null;
- }
-
- public void set_failed_isSet(boolean value) {
- if (!value) {
- this.failed = null;
- }
- }
-
- public int get_process_ms_avg_size() {
- return (this.process_ms_avg == null) ? 0 : this.process_ms_avg.size();
- }
-
- public void put_to_process_ms_avg(String key, Map<GlobalStreamId,Double> val) {
- if (this.process_ms_avg == null) {
- this.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>();
- }
- this.process_ms_avg.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Double>> get_process_ms_avg() {
- return this.process_ms_avg;
- }
-
- public void set_process_ms_avg(Map<String,Map<GlobalStreamId,Double>> process_ms_avg) {
- this.process_ms_avg = process_ms_avg;
- }
-
- public void unset_process_ms_avg() {
- this.process_ms_avg = null;
- }
-
- /** Returns true if field process_ms_avg is set (has been assigned a value) and false otherwise */
- public boolean is_set_process_ms_avg() {
- return this.process_ms_avg != null;
- }
-
- public void set_process_ms_avg_isSet(boolean value) {
- if (!value) {
- this.process_ms_avg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case EMITTED:
- if (value == null) {
- unset_emitted();
- } else {
- set_emitted((Map<String,Map<String,Long>>)value);
- }
- break;
-
- case SEND_TPS:
- if (value == null) {
- unset_send_tps();
- } else {
- set_send_tps((Map<String,Map<String,Double>>)value);
- }
- break;
-
- case RECV_TPS:
- if (value == null) {
- unset_recv_tps();
- } else {
- set_recv_tps((Map<String,Map<GlobalStreamId,Double>>)value);
- }
- break;
-
- case ACKED:
- if (value == null) {
- unset_acked();
- } else {
- set_acked((Map<String,Map<GlobalStreamId,Long>>)value);
- }
- break;
-
- case FAILED:
- if (value == null) {
- unset_failed();
- } else {
- set_failed((Map<String,Map<GlobalStreamId,Long>>)value);
- }
- break;
-
- case PROCESS_MS_AVG:
- if (value == null) {
- unset_process_ms_avg();
- } else {
- set_process_ms_avg((Map<String,Map<GlobalStreamId,Double>>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case EMITTED:
- return get_emitted();
-
- case SEND_TPS:
- return get_send_tps();
-
- case RECV_TPS:
- return get_recv_tps();
-
- case ACKED:
- return get_acked();
-
- case FAILED:
- return get_failed();
-
- case PROCESS_MS_AVG:
- return get_process_ms_avg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case EMITTED:
- return is_set_emitted();
- case SEND_TPS:
- return is_set_send_tps();
- case RECV_TPS:
- return is_set_recv_tps();
- case ACKED:
- return is_set_acked();
- case FAILED:
- return is_set_failed();
- case PROCESS_MS_AVG:
- return is_set_process_ms_avg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TaskStats)
- return this.equals((TaskStats)that);
- return false;
- }
-
- public boolean equals(TaskStats that) {
- if (that == null)
- return false;
-
- boolean this_present_emitted = true && this.is_set_emitted();
- boolean that_present_emitted = true && that.is_set_emitted();
- if (this_present_emitted || that_present_emitted) {
- if (!(this_present_emitted && that_present_emitted))
- return false;
- if (!this.emitted.equals(that.emitted))
- return false;
- }
-
- boolean this_present_send_tps = true && this.is_set_send_tps();
- boolean that_present_send_tps = true && that.is_set_send_tps();
- if (this_present_send_tps || that_present_send_tps) {
- if (!(this_present_send_tps && that_present_send_tps))
- return false;
- if (!this.send_tps.equals(that.send_tps))
- return false;
- }
-
- boolean this_present_recv_tps = true && this.is_set_recv_tps();
- boolean that_present_recv_tps = true && that.is_set_recv_tps();
- if (this_present_recv_tps || that_present_recv_tps) {
- if (!(this_present_recv_tps && that_present_recv_tps))
- return false;
- if (!this.recv_tps.equals(that.recv_tps))
- return false;
- }
-
- boolean this_present_acked = true && this.is_set_acked();
- boolean that_present_acked = true && that.is_set_acked();
- if (this_present_acked || that_present_acked) {
- if (!(this_present_acked && that_present_acked))
- return false;
- if (!this.acked.equals(that.acked))
- return false;
- }
-
- boolean this_present_failed = true && this.is_set_failed();
- boolean that_present_failed = true && that.is_set_failed();
- if (this_present_failed || that_present_failed) {
- if (!(this_present_failed && that_present_failed))
- return false;
- if (!this.failed.equals(that.failed))
- return false;
- }
-
- boolean this_present_process_ms_avg = true && this.is_set_process_ms_avg();
- boolean that_present_process_ms_avg = true && that.is_set_process_ms_avg();
- if (this_present_process_ms_avg || that_present_process_ms_avg) {
- if (!(this_present_process_ms_avg && that_present_process_ms_avg))
- return false;
- if (!this.process_ms_avg.equals(that.process_ms_avg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_emitted = true && (is_set_emitted());
- builder.append(present_emitted);
- if (present_emitted)
- builder.append(emitted);
-
- boolean present_send_tps = true && (is_set_send_tps());
- builder.append(present_send_tps);
- if (present_send_tps)
- builder.append(send_tps);
-
- boolean present_recv_tps = true && (is_set_recv_tps());
- builder.append(present_recv_tps);
- if (present_recv_tps)
- builder.append(recv_tps);
-
- boolean present_acked = true && (is_set_acked());
- builder.append(present_acked);
- if (present_acked)
- builder.append(acked);
-
- boolean present_failed = true && (is_set_failed());
- builder.append(present_failed);
- if (present_failed)
- builder.append(failed);
-
- boolean present_process_ms_avg = true && (is_set_process_ms_avg());
- builder.append(present_process_ms_avg);
- if (present_process_ms_avg)
- builder.append(process_ms_avg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TaskStats other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TaskStats typedOther = (TaskStats)other;
-
- lastComparison = Boolean.valueOf(is_set_emitted()).compareTo(typedOther.is_set_emitted());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_emitted()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.emitted, typedOther.emitted);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_send_tps()).compareTo(typedOther.is_set_send_tps());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_send_tps()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.send_tps, typedOther.send_tps);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_recv_tps()).compareTo(typedOther.is_set_recv_tps());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_recv_tps()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.recv_tps, typedOther.recv_tps);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_acked()).compareTo(typedOther.is_set_acked());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_acked()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.acked, typedOther.acked);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_failed()).compareTo(typedOther.is_set_failed());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_failed()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.failed, typedOther.failed);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_process_ms_avg()).compareTo(typedOther.is_set_process_ms_avg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_process_ms_avg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.process_ms_avg, typedOther.process_ms_avg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // EMITTED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map125 = iprot.readMapBegin();
- this.emitted = new HashMap<String,Map<String,Long>>(2*_map125.size);
- for (int _i126 = 0; _i126 < _map125.size; ++_i126)
- {
- String _key127; // required
- Map<String,Long> _val128; // required
- _key127 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map129 = iprot.readMapBegin();
- _val128 = new HashMap<String,Long>(2*_map129.size);
- for (int _i130 = 0; _i130 < _map129.size; ++_i130)
- {
- String _key131; // required
- long _val132; // required
- _key131 = iprot.readString();
- _val132 = iprot.readI64();
- _val128.put(_key131, _val132);
- }
- iprot.readMapEnd();
- }
- this.emitted.put(_key127, _val128);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // SEND_TPS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map133 = iprot.readMapBegin();
- this.send_tps = new HashMap<String,Map<String,Double>>(2*_map133.size);
- for (int _i134 = 0; _i134 < _map133.size; ++_i134)
- {
- String _key135; // required
- Map<String,Double> _val136; // required
- _key135 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map137 = iprot.readMapBegin();
- _val136 = new HashMap<String,Double>(2*_map137.size);
- for (int _i138 = 0; _i138 < _map137.size; ++_i138)
- {
- String _key139; // required
- double _val140; // required
- _key139 = iprot.readString();
- _val140 = iprot.readDouble();
- _val136.put(_key139, _val140);
- }
- iprot.readMapEnd();
- }
- this.send_tps.put(_key135, _val136);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // RECV_TPS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map141 = iprot.readMapBegin();
- this.recv_tps = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map141.size);
- for (int _i142 = 0; _i142 < _map141.size; ++_i142)
- {
- String _key143; // required
- Map<GlobalStreamId,Double> _val144; // required
- _key143 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map145 = iprot.readMapBegin();
- _val144 = new HashMap<GlobalStreamId,Double>(2*_map145.size);
- for (int _i146 = 0; _i146 < _map145.size; ++_i146)
- {
- GlobalStreamId _key147; // required
- double _val148; // required
- _key147 = new GlobalStreamId();
- _key147.read(iprot);
- _val148 = iprot.readDouble();
- _val144.put(_key147, _val148);
- }
- iprot.readMapEnd();
- }
- this.recv_tps.put(_key143, _val144);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // ACKED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map149 = iprot.readMapBegin();
- this.acked = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map149.size);
- for (int _i150 = 0; _i150 < _map149.size; ++_i150)
- {
- String _key151; // required
- Map<GlobalStreamId,Long> _val152; // required
- _key151 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map153 = iprot.readMapBegin();
- _val152 = new HashMap<GlobalStreamId,Long>(2*_map153.size);
- for (int _i154 = 0; _i154 < _map153.size; ++_i154)
- {
- GlobalStreamId _key155; // required
- long _val156; // required
- _key155 = new GlobalStreamId();
- _key155.read(iprot);
- _val156 = iprot.readI64();
- _val152.put(_key155, _val156);
- }
- iprot.readMapEnd();
- }
- this.acked.put(_key151, _val152);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // FAILED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map157 = iprot.readMapBegin();
- this.failed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map157.size);
- for (int _i158 = 0; _i158 < _map157.size; ++_i158)
- {
- String _key159; // required
- Map<GlobalStreamId,Long> _val160; // required
- _key159 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map161 = iprot.readMapBegin();
- _val160 = new HashMap<GlobalStreamId,Long>(2*_map161.size);
- for (int _i162 = 0; _i162 < _map161.size; ++_i162)
- {
- GlobalStreamId _key163; // required
- long _val164; // required
- _key163 = new GlobalStreamId();
- _key163.read(iprot);
- _val164 = iprot.readI64();
- _val160.put(_key163, _val164);
- }
- iprot.readMapEnd();
- }
- this.failed.put(_key159, _val160);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 6: // PROCESS_MS_AVG
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map165 = iprot.readMapBegin();
- this.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map165.size);
- for (int _i166 = 0; _i166 < _map165.size; ++_i166)
- {
- String _key167; // required
- Map<GlobalStreamId,Double> _val168; // required
- _key167 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map169 = iprot.readMapBegin();
- _val168 = new HashMap<GlobalStreamId,Double>(2*_map169.size);
- for (int _i170 = 0; _i170 < _map169.size; ++_i170)
- {
- GlobalStreamId _key171; // required
- double _val172; // required
- _key171 = new GlobalStreamId();
- _key171.read(iprot);
- _val172 = iprot.readDouble();
- _val168.put(_key171, _val172);
- }
- iprot.readMapEnd();
- }
- this.process_ms_avg.put(_key167, _val168);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.emitted != null) {
- oprot.writeFieldBegin(EMITTED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.emitted.size()));
- for (Map.Entry<String, Map<String,Long>> _iter173 : this.emitted.entrySet())
- {
- oprot.writeString(_iter173.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.I64, _iter173.getValue().size()));
- for (Map.Entry<String, Long> _iter174 : _iter173.getValue().entrySet())
- {
- oprot.writeString(_iter174.getKey());
- oprot.writeI64(_iter174.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.send_tps != null) {
- oprot.writeFieldBegin(SEND_TPS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.send_tps.size()));
- for (Map.Entry<String, Map<String,Double>> _iter175 : this.send_tps.entrySet())
- {
- oprot.writeString(_iter175.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, _iter175.getValue().size()));
- for (Map.Entry<String, Double> _iter176 : _iter175.getValue().entrySet())
- {
- oprot.writeString(_iter176.getKey());
- oprot.writeDouble(_iter176.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.recv_tps != null) {
- oprot.writeFieldBegin(RECV_TPS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.recv_tps.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter177 : this.recv_tps.entrySet())
- {
- oprot.writeString(_iter177.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.DOUBLE, _iter177.getValue().size()));
- for (Map.Entry<GlobalStreamId, Double> _iter178 : _iter177.getValue().entrySet())
- {
- _iter178.getKey().write(oprot);
- oprot.writeDouble(_iter178.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.acked != null) {
- oprot.writeFieldBegin(ACKED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.acked.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter179 : this.acked.entrySet())
- {
- oprot.writeString(_iter179.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.I64, _iter179.getValue().size()));
- for (Map.Entry<GlobalStreamId, Long> _iter180 : _iter179.getValue().entrySet())
- {
- _iter180.getKey().write(oprot);
- oprot.writeI64(_iter180.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.failed != null) {
- oprot.writeFieldBegin(FAILED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.failed.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter181 : this.failed.entrySet())
- {
- oprot.writeString(_iter181.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.I64, _iter181.getValue().size()));
- for (Map.Entry<GlobalStreamId, Long> _iter182 : _iter181.getValue().entrySet())
- {
- _iter182.getKey().write(oprot);
- oprot.writeI64(_iter182.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.process_ms_avg != null) {
- oprot.writeFieldBegin(PROCESS_MS_AVG_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.process_ms_avg.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter183 : this.process_ms_avg.entrySet())
- {
- oprot.writeString(_iter183.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.DOUBLE, _iter183.getValue().size()));
- for (Map.Entry<GlobalStreamId, Double> _iter184 : _iter183.getValue().entrySet())
- {
- _iter184.getKey().write(oprot);
- oprot.writeDouble(_iter184.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TaskStats(");
- boolean first = true;
-
- sb.append("emitted:");
- if (this.emitted == null) {
- sb.append("null");
- } else {
- sb.append(this.emitted);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("send_tps:");
- if (this.send_tps == null) {
- sb.append("null");
- } else {
- sb.append(this.send_tps);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("recv_tps:");
- if (this.recv_tps == null) {
- sb.append("null");
- } else {
- sb.append(this.recv_tps);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("acked:");
- if (this.acked == null) {
- sb.append("null");
- } else {
- sb.append(this.acked);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("failed:");
- if (this.failed == null) {
- sb.append("null");
- } else {
- sb.append(this.failed);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("process_ms_avg:");
- if (this.process_ms_avg == null) {
- sb.append("null");
- } else {
- sb.append(this.process_ms_avg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_emitted()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'emitted' is unset! Struct:" + toString());
- }
-
- if (!is_set_send_tps()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'send_tps' is unset! Struct:" + toString());
- }
-
- if (!is_set_recv_tps()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'recv_tps' is unset! Struct:" + toString());
- }
-
- if (!is_set_acked()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'acked' is unset! Struct:" + toString());
- }
-
- if (!is_set_failed()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'failed' is unset! Struct:" + toString());
- }
-
- if (!is_set_process_ms_avg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'process_ms_avg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TaskSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TaskSummary.java b/jstorm-client/src/main/java/backtype/storm/generated/TaskSummary.java
deleted file mode 100644
index 3483772..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TaskSummary.java
+++ /dev/null
@@ -1,1122 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TaskSummary implements org.apache.thrift7.TBase<TaskSummary, TaskSummary._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TaskSummary");
-
- private static final org.apache.thrift7.protocol.TField TASK_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("task_id", org.apache.thrift7.protocol.TType.I32, (short)1);
- private static final org.apache.thrift7.protocol.TField COMPONENT_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("component_id", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField HOST_FIELD_DESC = new org.apache.thrift7.protocol.TField("host", org.apache.thrift7.protocol.TType.STRING, (short)3);
- private static final org.apache.thrift7.protocol.TField PORT_FIELD_DESC = new org.apache.thrift7.protocol.TField("port", org.apache.thrift7.protocol.TType.I32, (short)4);
- private static final org.apache.thrift7.protocol.TField UPTIME_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("uptime_secs", org.apache.thrift7.protocol.TType.I32, (short)5);
- private static final org.apache.thrift7.protocol.TField ERRORS_FIELD_DESC = new org.apache.thrift7.protocol.TField("errors", org.apache.thrift7.protocol.TType.LIST, (short)6);
- private static final org.apache.thrift7.protocol.TField STATS_FIELD_DESC = new org.apache.thrift7.protocol.TField("stats", org.apache.thrift7.protocol.TType.STRUCT, (short)7);
- private static final org.apache.thrift7.protocol.TField COMPONENT_TYPE_FIELD_DESC = new org.apache.thrift7.protocol.TField("component_type", org.apache.thrift7.protocol.TType.STRING, (short)8);
- private static final org.apache.thrift7.protocol.TField STATUS_FIELD_DESC = new org.apache.thrift7.protocol.TField("status", org.apache.thrift7.protocol.TType.STRING, (short)9);
-
- private int task_id; // required
- private String component_id; // required
- private String host; // required
- private int port; // required
- private int uptime_secs; // required
- private List<ErrorInfo> errors; // required
- private TaskStats stats; // required
- private String component_type; // required
- private String status; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- TASK_ID((short)1, "task_id"),
- COMPONENT_ID((short)2, "component_id"),
- HOST((short)3, "host"),
- PORT((short)4, "port"),
- UPTIME_SECS((short)5, "uptime_secs"),
- ERRORS((short)6, "errors"),
- STATS((short)7, "stats"),
- COMPONENT_TYPE((short)8, "component_type"),
- STATUS((short)9, "status");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if it is not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // TASK_ID
- return TASK_ID;
- case 2: // COMPONENT_ID
- return COMPONENT_ID;
- case 3: // HOST
- return HOST;
- case 4: // PORT
- return PORT;
- case 5: // UPTIME_SECS
- return UPTIME_SECS;
- case 6: // ERRORS
- return ERRORS;
- case 7: // STATS
- return STATS;
- case 8: // COMPONENT_TYPE
- return COMPONENT_TYPE;
- case 9: // STATUS
- return STATUS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if it is not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __TASK_ID_ISSET_ID = 0;
- private static final int __PORT_ISSET_ID = 1;
- private static final int __UPTIME_SECS_ISSET_ID = 2;
- private BitSet __isset_bit_vector = new BitSet(3);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TASK_ID, new org.apache.thrift7.meta_data.FieldMetaData("task_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.COMPONENT_ID, new org.apache.thrift7.meta_data.FieldMetaData("component_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.HOST, new org.apache.thrift7.meta_data.FieldMetaData("host", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.PORT, new org.apache.thrift7.meta_data.FieldMetaData("port", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.UPTIME_SECS, new org.apache.thrift7.meta_data.FieldMetaData("uptime_secs", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.ERRORS, new org.apache.thrift7.meta_data.FieldMetaData("errors", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ErrorInfo.class))));
- tmpMap.put(_Fields.STATS, new org.apache.thrift7.meta_data.FieldMetaData("stats", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, TaskStats.class)));
- tmpMap.put(_Fields.COMPONENT_TYPE, new org.apache.thrift7.meta_data.FieldMetaData("component_type", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.STATUS, new org.apache.thrift7.meta_data.FieldMetaData("status", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TaskSummary.class, metaDataMap);
- }
-
- public TaskSummary() {
- }
-
- public TaskSummary(
- int task_id,
- String component_id,
- String host,
- int port)
- {
- this();
- this.task_id = task_id;
- set_task_id_isSet(true);
- this.component_id = component_id;
- this.host = host;
- this.port = port;
- set_port_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TaskSummary(TaskSummary other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.task_id = other.task_id;
- if (other.is_set_component_id()) {
- this.component_id = other.component_id;
- }
- if (other.is_set_host()) {
- this.host = other.host;
- }
- this.port = other.port;
- this.uptime_secs = other.uptime_secs;
- if (other.is_set_errors()) {
- List<ErrorInfo> __this__errors = new ArrayList<ErrorInfo>();
- for (ErrorInfo other_element : other.errors) {
- __this__errors.add(new ErrorInfo(other_element));
- }
- this.errors = __this__errors;
- }
- if (other.is_set_stats()) {
- this.stats = new TaskStats(other.stats);
- }
- if (other.is_set_component_type()) {
- this.component_type = other.component_type;
- }
- if (other.is_set_status()) {
- this.status = other.status;
- }
- }
-
- public TaskSummary deepCopy() {
- return new TaskSummary(this);
- }
-
- @Override
- public void clear() {
- set_task_id_isSet(false);
- this.task_id = 0;
- this.component_id = null;
- this.host = null;
- set_port_isSet(false);
- this.port = 0;
- set_uptime_secs_isSet(false);
- this.uptime_secs = 0;
- this.errors = null;
- this.stats = null;
- this.component_type = null;
- this.status = null;
- }
-
- public int get_task_id() {
- return this.task_id;
- }
-
- public void set_task_id(int task_id) {
- this.task_id = task_id;
- set_task_id_isSet(true);
- }
-
- public void unset_task_id() {
- __isset_bit_vector.clear(__TASK_ID_ISSET_ID);
- }
-
- /** Returns true if field task_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_task_id() {
- return __isset_bit_vector.get(__TASK_ID_ISSET_ID);
- }
-
- public void set_task_id_isSet(boolean value) {
- __isset_bit_vector.set(__TASK_ID_ISSET_ID, value);
- }
-
- public String get_component_id() {
- return this.component_id;
- }
-
- public void set_component_id(String component_id) {
- this.component_id = component_id;
- }
-
- public void unset_component_id() {
- this.component_id = null;
- }
-
- /** Returns true if field component_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_component_id() {
- return this.component_id != null;
- }
-
- public void set_component_id_isSet(boolean value) {
- if (!value) {
- this.component_id = null;
- }
- }
-
- public String get_host() {
- return this.host;
- }
-
- public void set_host(String host) {
- this.host = host;
- }
-
- public void unset_host() {
- this.host = null;
- }
-
- /** Returns true if field host is set (has been assigned a value) and false otherwise */
- public boolean is_set_host() {
- return this.host != null;
- }
-
- public void set_host_isSet(boolean value) {
- if (!value) {
- this.host = null;
- }
- }
-
- public int get_port() {
- return this.port;
- }
-
- public void set_port(int port) {
- this.port = port;
- set_port_isSet(true);
- }
-
- public void unset_port() {
- __isset_bit_vector.clear(__PORT_ISSET_ID);
- }
-
- /** Returns true if field port is set (has been assigned a value) and false otherwise */
- public boolean is_set_port() {
- return __isset_bit_vector.get(__PORT_ISSET_ID);
- }
-
- public void set_port_isSet(boolean value) {
- __isset_bit_vector.set(__PORT_ISSET_ID, value);
- }
-
- public int get_uptime_secs() {
- return this.uptime_secs;
- }
-
- public void set_uptime_secs(int uptime_secs) {
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- }
-
- public void unset_uptime_secs() {
- __isset_bit_vector.clear(__UPTIME_SECS_ISSET_ID);
- }
-
- /** Returns true if field uptime_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_uptime_secs() {
- return __isset_bit_vector.get(__UPTIME_SECS_ISSET_ID);
- }
-
- public void set_uptime_secs_isSet(boolean value) {
- __isset_bit_vector.set(__UPTIME_SECS_ISSET_ID, value);
- }
-
- public int get_errors_size() {
- return (this.errors == null) ? 0 : this.errors.size();
- }
-
- public java.util.Iterator<ErrorInfo> get_errors_iterator() {
- return (this.errors == null) ? null : this.errors.iterator();
- }
-
- public void add_to_errors(ErrorInfo elem) {
- if (this.errors == null) {
- this.errors = new ArrayList<ErrorInfo>();
- }
- this.errors.add(elem);
- }
-
- public List<ErrorInfo> get_errors() {
- return this.errors;
- }
-
- public void set_errors(List<ErrorInfo> errors) {
- this.errors = errors;
- }
-
- public void unset_errors() {
- this.errors = null;
- }
-
- /** Returns true if field errors is set (has been assigned a value) and false otherwise */
- public boolean is_set_errors() {
- return this.errors != null;
- }
-
- public void set_errors_isSet(boolean value) {
- if (!value) {
- this.errors = null;
- }
- }
-
- public TaskStats get_stats() {
- return this.stats;
- }
-
- public void set_stats(TaskStats stats) {
- this.stats = stats;
- }
-
- public void unset_stats() {
- this.stats = null;
- }
-
- /** Returns true if field stats is set (has been assigned a value) and false otherwise */
- public boolean is_set_stats() {
- return this.stats != null;
- }
-
- public void set_stats_isSet(boolean value) {
- if (!value) {
- this.stats = null;
- }
- }
-
- public String get_component_type() {
- return this.component_type;
- }
-
- public void set_component_type(String component_type) {
- this.component_type = component_type;
- }
-
- public void unset_component_type() {
- this.component_type = null;
- }
-
- /** Returns true if field component_type is set (has been assigned a value) and false otherwise */
- public boolean is_set_component_type() {
- return this.component_type != null;
- }
-
- public void set_component_type_isSet(boolean value) {
- if (!value) {
- this.component_type = null;
- }
- }
-
- public String get_status() {
- return this.status;
- }
-
- public void set_status(String status) {
- this.status = status;
- }
-
- public void unset_status() {
- this.status = null;
- }
-
- /** Returns true if field status is set (has been assigned a value) and false otherwise */
- public boolean is_set_status() {
- return this.status != null;
- }
-
- public void set_status_isSet(boolean value) {
- if (!value) {
- this.status = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case TASK_ID:
- if (value == null) {
- unset_task_id();
- } else {
- set_task_id((Integer)value);
- }
- break;
-
- case COMPONENT_ID:
- if (value == null) {
- unset_component_id();
- } else {
- set_component_id((String)value);
- }
- break;
-
- case HOST:
- if (value == null) {
- unset_host();
- } else {
- set_host((String)value);
- }
- break;
-
- case PORT:
- if (value == null) {
- unset_port();
- } else {
- set_port((Integer)value);
- }
- break;
-
- case UPTIME_SECS:
- if (value == null) {
- unset_uptime_secs();
- } else {
- set_uptime_secs((Integer)value);
- }
- break;
-
- case ERRORS:
- if (value == null) {
- unset_errors();
- } else {
- set_errors((List<ErrorInfo>)value);
- }
- break;
-
- case STATS:
- if (value == null) {
- unset_stats();
- } else {
- set_stats((TaskStats)value);
- }
- break;
-
- case COMPONENT_TYPE:
- if (value == null) {
- unset_component_type();
- } else {
- set_component_type((String)value);
- }
- break;
-
- case STATUS:
- if (value == null) {
- unset_status();
- } else {
- set_status((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case TASK_ID:
- return Integer.valueOf(get_task_id());
-
- case COMPONENT_ID:
- return get_component_id();
-
- case HOST:
- return get_host();
-
- case PORT:
- return Integer.valueOf(get_port());
-
- case UPTIME_SECS:
- return Integer.valueOf(get_uptime_secs());
-
- case ERRORS:
- return get_errors();
-
- case STATS:
- return get_stats();
-
- case COMPONENT_TYPE:
- return get_component_type();
-
- case STATUS:
- return get_status();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case TASK_ID:
- return is_set_task_id();
- case COMPONENT_ID:
- return is_set_component_id();
- case HOST:
- return is_set_host();
- case PORT:
- return is_set_port();
- case UPTIME_SECS:
- return is_set_uptime_secs();
- case ERRORS:
- return is_set_errors();
- case STATS:
- return is_set_stats();
- case COMPONENT_TYPE:
- return is_set_component_type();
- case STATUS:
- return is_set_status();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TaskSummary)
- return this.equals((TaskSummary)that);
- return false;
- }
-
- public boolean equals(TaskSummary that) {
- if (that == null)
- return false;
-
- boolean this_present_task_id = true;
- boolean that_present_task_id = true;
- if (this_present_task_id || that_present_task_id) {
- if (!(this_present_task_id && that_present_task_id))
- return false;
- if (this.task_id != that.task_id)
- return false;
- }
-
- boolean this_present_component_id = true && this.is_set_component_id();
- boolean that_present_component_id = true && that.is_set_component_id();
- if (this_present_component_id || that_present_component_id) {
- if (!(this_present_component_id && that_present_component_id))
- return false;
- if (!this.component_id.equals(that.component_id))
- return false;
- }
-
- boolean this_present_host = true && this.is_set_host();
- boolean that_present_host = true && that.is_set_host();
- if (this_present_host || that_present_host) {
- if (!(this_present_host && that_present_host))
- return false;
- if (!this.host.equals(that.host))
- return false;
- }
-
- boolean this_present_port = true;
- boolean that_present_port = true;
- if (this_present_port || that_present_port) {
- if (!(this_present_port && that_present_port))
- return false;
- if (this.port != that.port)
- return false;
- }
-
- boolean this_present_uptime_secs = true && this.is_set_uptime_secs();
- boolean that_present_uptime_secs = true && that.is_set_uptime_secs();
- if (this_present_uptime_secs || that_present_uptime_secs) {
- if (!(this_present_uptime_secs && that_present_uptime_secs))
- return false;
- if (this.uptime_secs != that.uptime_secs)
- return false;
- }
-
- boolean this_present_errors = true && this.is_set_errors();
- boolean that_present_errors = true && that.is_set_errors();
- if (this_present_errors || that_present_errors) {
- if (!(this_present_errors && that_present_errors))
- return false;
- if (!this.errors.equals(that.errors))
- return false;
- }
-
- boolean this_present_stats = true && this.is_set_stats();
- boolean that_present_stats = true && that.is_set_stats();
- if (this_present_stats || that_present_stats) {
- if (!(this_present_stats && that_present_stats))
- return false;
- if (!this.stats.equals(that.stats))
- return false;
- }
-
- boolean this_present_component_type = true && this.is_set_component_type();
- boolean that_present_component_type = true && that.is_set_component_type();
- if (this_present_component_type || that_present_component_type) {
- if (!(this_present_component_type && that_present_component_type))
- return false;
- if (!this.component_type.equals(that.component_type))
- return false;
- }
-
- boolean this_present_status = true && this.is_set_status();
- boolean that_present_status = true && that.is_set_status();
- if (this_present_status || that_present_status) {
- if (!(this_present_status && that_present_status))
- return false;
- if (!this.status.equals(that.status))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_task_id = true;
- builder.append(present_task_id);
- if (present_task_id)
- builder.append(task_id);
-
- boolean present_component_id = true && (is_set_component_id());
- builder.append(present_component_id);
- if (present_component_id)
- builder.append(component_id);
-
- boolean present_host = true && (is_set_host());
- builder.append(present_host);
- if (present_host)
- builder.append(host);
-
- boolean present_port = true;
- builder.append(present_port);
- if (present_port)
- builder.append(port);
-
- boolean present_uptime_secs = true && (is_set_uptime_secs());
- builder.append(present_uptime_secs);
- if (present_uptime_secs)
- builder.append(uptime_secs);
-
- boolean present_errors = true && (is_set_errors());
- builder.append(present_errors);
- if (present_errors)
- builder.append(errors);
-
- boolean present_stats = true && (is_set_stats());
- builder.append(present_stats);
- if (present_stats)
- builder.append(stats);
-
- boolean present_component_type = true && (is_set_component_type());
- builder.append(present_component_type);
- if (present_component_type)
- builder.append(component_type);
-
- boolean present_status = true && (is_set_status());
- builder.append(present_status);
- if (present_status)
- builder.append(status);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TaskSummary other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TaskSummary typedOther = (TaskSummary)other;
-
- lastComparison = Boolean.valueOf(is_set_task_id()).compareTo(typedOther.is_set_task_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_task_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.task_id, typedOther.task_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_component_id()).compareTo(typedOther.is_set_component_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_component_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.component_id, typedOther.component_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_host()).compareTo(typedOther.is_set_host());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_host()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.host, typedOther.host);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_port()).compareTo(typedOther.is_set_port());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_port()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.port, typedOther.port);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_uptime_secs()).compareTo(typedOther.is_set_uptime_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_uptime_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.uptime_secs, typedOther.uptime_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_errors()).compareTo(typedOther.is_set_errors());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_errors()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.errors, typedOther.errors);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_stats()).compareTo(typedOther.is_set_stats());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_stats()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.stats, typedOther.stats);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_component_type()).compareTo(typedOther.is_set_component_type());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_component_type()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.component_type, typedOther.component_type);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_status()).compareTo(typedOther.is_set_status());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_status()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.status, typedOther.status);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // TASK_ID
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.task_id = iprot.readI32();
- set_task_id_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // COMPONENT_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.component_id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // HOST
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.host = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // PORT
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.port = iprot.readI32();
- set_port_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // UPTIME_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.uptime_secs = iprot.readI32();
- set_uptime_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 6: // ERRORS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list185 = iprot.readListBegin();
- this.errors = new ArrayList<ErrorInfo>(_list185.size);
- for (int _i186 = 0; _i186 < _list185.size; ++_i186)
- {
- ErrorInfo _elem187; // required
- _elem187 = new ErrorInfo();
- _elem187.read(iprot);
- this.errors.add(_elem187);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 7: // STATS
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.stats = new TaskStats();
- this.stats.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 8: // COMPONENT_TYPE
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.component_type = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 9: // STATUS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.status = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldBegin(TASK_ID_FIELD_DESC);
- oprot.writeI32(this.task_id);
- oprot.writeFieldEnd();
- if (this.component_id != null) {
- oprot.writeFieldBegin(COMPONENT_ID_FIELD_DESC);
- oprot.writeString(this.component_id);
- oprot.writeFieldEnd();
- }
- if (this.host != null) {
- oprot.writeFieldBegin(HOST_FIELD_DESC);
- oprot.writeString(this.host);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(PORT_FIELD_DESC);
- oprot.writeI32(this.port);
- oprot.writeFieldEnd();
- if (is_set_uptime_secs()) {
- oprot.writeFieldBegin(UPTIME_SECS_FIELD_DESC);
- oprot.writeI32(this.uptime_secs);
- oprot.writeFieldEnd();
- }
- if (this.errors != null) {
- if (is_set_errors()) {
- oprot.writeFieldBegin(ERRORS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.errors.size()));
- for (ErrorInfo _iter188 : this.errors)
- {
- _iter188.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- }
- if (this.stats != null) {
- if (is_set_stats()) {
- oprot.writeFieldBegin(STATS_FIELD_DESC);
- this.stats.write(oprot);
- oprot.writeFieldEnd();
- }
- }
- if (this.component_type != null) {
- if (is_set_component_type()) {
- oprot.writeFieldBegin(COMPONENT_TYPE_FIELD_DESC);
- oprot.writeString(this.component_type);
- oprot.writeFieldEnd();
- }
- }
- if (this.status != null) {
- if (is_set_status()) {
- oprot.writeFieldBegin(STATUS_FIELD_DESC);
- oprot.writeString(this.status);
- oprot.writeFieldEnd();
- }
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TaskSummary(");
- boolean first = true;
-
- sb.append("task_id:");
- sb.append(this.task_id);
- first = false;
- if (!first) sb.append(", ");
- sb.append("component_id:");
- if (this.component_id == null) {
- sb.append("null");
- } else {
- sb.append(this.component_id);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("host:");
- if (this.host == null) {
- sb.append("null");
- } else {
- sb.append(this.host);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("port:");
- sb.append(this.port);
- first = false;
- if (is_set_uptime_secs()) {
- if (!first) sb.append(", ");
- sb.append("uptime_secs:");
- sb.append(this.uptime_secs);
- first = false;
- }
- if (is_set_errors()) {
- if (!first) sb.append(", ");
- sb.append("errors:");
- if (this.errors == null) {
- sb.append("null");
- } else {
- sb.append(this.errors);
- }
- first = false;
- }
- if (is_set_stats()) {
- if (!first) sb.append(", ");
- sb.append("stats:");
- if (this.stats == null) {
- sb.append("null");
- } else {
- sb.append(this.stats);
- }
- first = false;
- }
- if (is_set_component_type()) {
- if (!first) sb.append(", ");
- sb.append("component_type:");
- if (this.component_type == null) {
- sb.append("null");
- } else {
- sb.append(this.component_type);
- }
- first = false;
- }
- if (is_set_status()) {
- if (!first) sb.append(", ");
- sb.append("status:");
- if (this.status == null) {
- sb.append("null");
- } else {
- sb.append(this.status);
- }
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_task_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'task_id' is unset! Struct:" + toString());
- }
-
- if (!is_set_component_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'component_id' is unset! Struct:" + toString());
- }
-
- if (!is_set_host()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'host' is unset! Struct:" + toString());
- }
-
- if (!is_set_port()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'port' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ThriftResourceType.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ThriftResourceType.java b/jstorm-client/src/main/java/backtype/storm/generated/ThriftResourceType.java
deleted file mode 100644
index 34617a6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ThriftResourceType.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-
-import java.util.Map;
-import java.util.HashMap;
-import org.apache.thrift7.TEnum;
-
-public enum ThriftResourceType implements org.apache.thrift7.TEnum {
- UNKNOWN(1),
- MEM(2),
- NET(3);
-
- private final int value;
-
- private ThriftResourceType(int value) {
- this.value = value;
- }
-
- /**
- * Get the integer value of this enum value, as defined in the Thrift IDL.
- */
- public int getValue() {
- return value;
- }
-
- /**
- * Find the enum type by its integer value, as defined in the Thrift IDL.
- * @return null if the value is not found.
- */
- public static ThriftResourceType findByValue(int value) {
- switch (value) {
- case 1:
- return UNKNOWN;
- case 2:
- return MEM;
- case 3:
- return NET;
- default:
- return null;
- }
- }
-}
[48/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/history_cn.md
----------------------------------------------------------------------
diff --git a/history_cn.md b/history_cn.md
index 1b90dd1..e57e9bb 100644
--- a/history_cn.md
+++ b/history_cn.md
@@ -1,5 +1,82 @@
[JStorm English introduction](http://42.121.19.155/jstorm/JStorm-introduce-en.pptx)
[JStorm Chinese introduction](http://42.121.19.155/jstorm/JStorm-introduce.pptx)
+#Release 2.0.4-SNAPSHOT
+## New features
+1. Completely refactored the metrics sampling system, using a new RollingWindow and metric calculation model, especially for netty metrics; in addition, metrics are no longer sent and received through ZK
+2. Completely refactored the Web UI
+3. Introduced RocksDB and added a Nimbus cache layer
+4. Reviewed all ZK nodes and ZK operations, removing unused ZK operations
+5. Reviewed all Thrift data structures and functions, removing unused RPC functions
+6. Merged jstorm-client/jstorm-client-extension/jstorm-core into a single jstorm-core module
+7. Synchronized dependencies with Apache Storm
+8. Synchronized the apache-storm-0.10.0-beta1 Java code
+9. Switched the logging system to logback
+10. Upgraded Thrift to Apache Thrift 0.9.2
+11. Optimized for very large topologies with more than 600 workers / 2000 tasks
+12. Requires JDK 7 or higher
+
+#Release 0.9.7.1
+## New features
+1. Added automatic tuple batching to improve TPS and lower message processing latency (task.batch.tuple=true, task.msg.batch.size=4); see the illustrative config sketch after this diff section
+2. With localFirst grouping, when the local node cannot keep up, traffic is automatically spread to remote nodes
+3. Support dynamic updates to topology configuration while the topology is running
+4. Support per-topology custom settings for task heartbeat and task cleanup timeouts
+5. Added support for the non-blocking TimeoutBlockingWaitStrategy in the disruptor queue
+6. Added a message send timeout setting at the Netty layer, and optimized the Netty client configuration
+7. Updated the tuple processing architecture: removed the unnecessary global receive and send queues, reducing message hops, improving performance and lowering JStorm's own CPU usage
+8. Added the client option "--include-jars" so that extra jars can be supplied when submitting a topology
+9. When starting nimbus/supervisor, refuse to start if the resolved address is 127.0.0.0
+10. Added custom examples
+11. Merged the supervisor's ZK sync thread syncSupervisor and the worker sync thread syncProcess
+## Configuration changes
+1. Default heartbeat timeout set to 4 minutes
+2. Changed the size of the netty thread pool clientScheduleService to 5
+## Bug fix
+1. Tuned GC parameters: workers with less than 4 GB of memory default to 4 GC threads; above 4 GB, the number of GC threads is (memory size / 1 GB) * 1.5
+2. Fixed a bug where task heartbeats might not be updated in time when a bolt processes slowly
+3. Fixed an abnormal wait during netty reconnection in some cases
+4. Avoid creating duplicate Thrift clients when submitting a topology
+5. Fixed repeated binary downloads when a worker fails to start
+## Operations and scripts
+1. Improved the cleandisk.sh script to avoid deleting the current directory and /tmp/hsperfdata_admin/
+2. Added execute permission to the scripts under example
+3. Added the parameter supervisor.host.start: true/false so that start.sh can control in batch whether supervisors are started; the default is to start the supervisor
+
+#Release 0.9.7
+## New features
+1. Implemented dynamic adjustment of topology parallelism: workers, spouts, bolts, or ackers can be scaled up or down without taking the topology offline; the rebalance command was extended to support this
+2. When resource isolation is enabled, added an upper limit on the CPU cores a worker may use
+3. Adjusted the task heartbeat update mechanism so it correctly reflects the state of the spout/bolt execute main thread
+4. Worker and task logs can carry a JStorm information prefix (clusterName, topologyName, ip:port, componentName, taskId, taskIndex)
+5. Topology scheduling now checks supervisor heartbeat status and does not schedule tasks onto unresponsive supervisors
+6. Added metric query APIs, e.g. task queue load and worker CPU/memory usage
+7. Supervisors retry topology jar downloads so that workers no longer fail to start because a jar was corrupted during download
+8. Added a ZK cache to speed up ZK reads, with direct reads for some nodes
+9. Added the Thrift getVersion API; a warning is reported when client and server versions differ
+10. Added supervisor heartbeat checks; tasks are not assigned to supervisors whose heartbeats have timed out
+11. Updated the data structure of user defined metrics sent to Alimonitor
+12. Added the client exclude-jar feature; when submitting a topology, jar conflicts can be resolved via exclude-jar and the classloader
+## Configuration changes
+1. Changed the supervisor-to-nimbus heartbeat timeout to 180 seconds
+2. To avoid OutOfMemory errors, set the default value of storm.messaging.netty.max.pending to 4
+3. Set Nimbus memory to 4 GB
+4. Increased queue sizes: task queue to 1024, global send and receive queues to 2048
+## Bug fix
+1. When a topology with many workers is restarted several times within a short period, the supervisor could appear hung due to an OOM in the Nimbus Thrift thread
+2. When topologies are submitted concurrently, later submissions could fail
+3. tickTuple does not need to be acked; corrected the incorrect failed-message statistics for tickTuple
+4. Fixed possible errors in the default scheduler when use.old.assignment=true
+5. Fixed incomplete ZK cleanup when a topology is deleted
+6. Fixed a failure when restarting a topology during task assignment
+7. Fixed a race condition when multiple topologies are submitted at the same time
+8. Fixed an NPE when registering metrics
+9. Fixed a failure in zkTool when reading the monitor znode
+10. Fixed an exception in local mode when classloader mode is enabled
+11. Fixed duplicate log output in local mode when a custom logback configuration is used
+## Operations & scripts
+1. Add rpm build spec
+2. Add deploy files of jstorm for rpm package building
+3. The cron job now runs once per hour, and core dumps are kept for 1 hour
#Release 0.9.6.3
## New features
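For illustration only: the tuple-batching item in the 0.9.7.1 notes above refers to two plain configuration keys. A minimal sketch, assuming only the key names quoted from that note (the class and everything else here is hypothetical and not part of this commit):

import backtype.storm.Config;

public class BatchTupleConfigSketch {
    // Hedged sketch: the two keys are quoted verbatim from the 0.9.7.1 note;
    // how the returned Config is merged into a real topology submission is assumed.
    public static Config batchingConf() {
        Config conf = new Config();
        conf.put("task.batch.tuple", true);   // enable automatic tuple batching
        conf.put("task.msg.batch.size", 4);   // batch up to 4 tuples per message
        return conf;
    }
}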
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/pom.xml b/jstorm-client-extension/pom.xml
deleted file mode 100644
index 40650cd..0000000
--- a/jstorm-client-extension/pom.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
- xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-
- <parent>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-all</artifactId>
- <version>0.9.6.3</version>
- <relativePath>..</relativePath>
- </parent>
- <!-- <parent>
- <groupId>com.taobao</groupId>
- <artifactId>parent</artifactId>
- <version>1.0.2</version>
- </parent> -->
- <modelVersion>4.0.0</modelVersion>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
- <version>0.9.6.3</version>
- <packaging>jar</packaging>
- <name>${project.artifactId}-${project.version}</name>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-compiler-plugin</artifactId>
- <version>2.3.2</version>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-source-plugin</artifactId>
- <executions>
- <execution>
- <id>attach-sources</id>
- <goals>
- <goal>jar</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- </plugins>
- </build>
- <properties>
- <powermock.version>1.4.11</powermock.version>
- <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- </properties>
- <dependencies>
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>${project.version}</version>
- <scope>provided</scope>
- </dependency>
- <dependency>
- <groupId>com.alibaba</groupId>
- <artifactId>fastjson</artifactId>
- <version>1.1.41</version>
- </dependency>
- <dependency>
- <groupId>com.sun.net.httpserver</groupId>
- <artifactId>http</artifactId>
- <version>20070405</version>
- </dependency>
- <dependency>
- <groupId>org.powermock</groupId>
- <artifactId>powermock-module-junit4</artifactId>
- <version>${powermock.version}</version>
- <scope>test</scope>
- </dependency>
- <dependency>
- <groupId>com.codahale.metrics</groupId>
- <artifactId>metrics-core</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.codahale.metrics</groupId>
- <artifactId>metrics-jvm</artifactId>
- <version>3.0.1</version>
- </dependency>
- </dependencies>
-</project>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchId.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchId.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchId.java
deleted file mode 100644
index e147db0..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchId.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package com.alibaba.jstorm.batch;
-
-import java.io.Serializable;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-public class BatchId implements Serializable {
- private static final long serialVersionUID = 5720810158625748049L;
- protected final long id;
-
- protected BatchId(long id) {
- this.id = id;
- }
-
- public long getId() {
- return id;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + (int) (id ^ (id >>> 32));
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- BatchId other = (BatchId) obj;
- if (id != other.id)
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-
- private static AtomicLong staticId = new AtomicLong(0);
-
- public static void updateId(long id) {
- staticId.set(id);
- }
-
- public static BatchId mkInstance() {
- long id = staticId.incrementAndGet();
-
- return new BatchId(id);
- }
-
- public static BatchId incBatchId(BatchId old) {
- long other = old.getId();
- return new BatchId(other + 1);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchTopologyBuilder.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchTopologyBuilder.java
deleted file mode 100644
index dc599f1..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/BatchTopologyBuilder.java
+++ /dev/null
@@ -1,69 +0,0 @@
-package com.alibaba.jstorm.batch;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.SpoutDeclarer;
-import backtype.storm.topology.TopologyBuilder;
-
-import com.alibaba.jstorm.batch.impl.BatchSpoutTrigger;
-import com.alibaba.jstorm.batch.impl.CoordinatedBolt;
-import com.alibaba.jstorm.batch.util.BatchDef;
-
-public class BatchTopologyBuilder {
- private static final Logger LOG = Logger
- .getLogger(BatchTopologyBuilder.class);
-
- private TopologyBuilder topologyBuilder;
-
- private SpoutDeclarer spoutDeclarer;
-
- public BatchTopologyBuilder(String topologyName) {
- topologyBuilder = new TopologyBuilder();
-
- spoutDeclarer = topologyBuilder.setSpout(BatchDef.SPOUT_TRIGGER,
- new BatchSpoutTrigger(), 1);
- }
-
- public BoltDeclarer setSpout(String id, IBatchSpout spout, int paralel) {
-
- BoltDeclarer boltDeclarer = this
- .setBolt(id, (IBatchSpout) spout, paralel);
- boltDeclarer.allGrouping(BatchDef.SPOUT_TRIGGER,
- BatchDef.COMPUTING_STREAM_ID);
-
- return boltDeclarer;
- }
-
- public BoltDeclarer setBolt(String id, IBasicBolt bolt, int paralel) {
- CoordinatedBolt coordinatedBolt = new CoordinatedBolt(bolt);
-
- BoltDeclarer boltDeclarer = topologyBuilder.setBolt(id,
- coordinatedBolt, paralel);
-
- if (bolt instanceof IPrepareCommit) {
- boltDeclarer.allGrouping(BatchDef.SPOUT_TRIGGER,
- BatchDef.PREPARE_STREAM_ID);
- }
-
- if (bolt instanceof ICommitter) {
- boltDeclarer.allGrouping(BatchDef.SPOUT_TRIGGER,
- BatchDef.COMMIT_STREAM_ID);
- boltDeclarer.allGrouping(BatchDef.SPOUT_TRIGGER,
- BatchDef.REVERT_STREAM_ID);
- }
-
- if (bolt instanceof IPostCommit) {
- boltDeclarer.allGrouping(BatchDef.SPOUT_TRIGGER,
- BatchDef.POST_STREAM_ID);
- }
-
- return boltDeclarer;
- }
-
- public TopologyBuilder getTopologyBuilder() {
- return topologyBuilder;
- }
-
-}
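The deleted BatchTopologyBuilder above wires in a hidden BatchSpoutTrigger spout and wraps user bolts in CoordinatedBolt, attaching the prepare/commit/revert/post streams according to the interfaces a bolt implements. A minimal usage sketch, assuming hypothetical MyBatchSpout (an IBatchSpout) and MyCountBolt (an IBasicBolt) user classes that are not part of this commit:

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import com.alibaba.jstorm.batch.BatchTopologyBuilder;

public class BatchTopologySketch {
    public static void main(String[] args) throws Exception {
        // Hedged sketch: only the builder calls mirror the deleted API above;
        // MyBatchSpout and MyCountBolt are assumed user implementations.
        BatchTopologyBuilder builder = new BatchTopologyBuilder("batch-demo");
        builder.setSpout("spout", new MyBatchSpout(), 1);
        builder.setBolt("count", new MyCountBolt(), 2);

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("batch-demo", new Config(),
                builder.getTopologyBuilder().createTopology());
    }
}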
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IBatchSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IBatchSpout.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IBatchSpout.java
deleted file mode 100644
index d3d1178..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IBatchSpout.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.alibaba.jstorm.batch;
-
-import java.io.Serializable;
-
-import backtype.storm.topology.IBasicBolt;
-
-public interface IBatchSpout extends IBasicBolt, ICommitter, Serializable {
-
- /**
- * The input tuple's field 0 is the BatchId.
- *
- * execute() only receives the trigger message.
- *
- * Do the emitBatch operation inside execute(); the stream ID is "batch/compute-stream".
- */
- //void execute(Tuple input, IBasicOutputCollector collector);
- /**
- * Begin to ack (commit) the batch identified by batchId.
- *
- * The return value will be stored in ZK, so often no special action is needed.
- *
- * @param id
- */
- //byte[] commit(BatchId id) throws FailedException;
-
- /**
- * Begin to revert the batch identified by batchId.
- *
- * If the current task fails to commit batchId, revert(batchId) will not be called.
- * If the current task fails to revert batchId, JStorm will not call revert again.
- *
- * If the topology is not transactional, revert can simply be ignored.
- *
- * @param id
- */
- //void revert(BatchId id, byte[] commitResult);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/ICommitter.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/ICommitter.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/ICommitter.java
deleted file mode 100644
index 9492398..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/ICommitter.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package com.alibaba.jstorm.batch;
-
-import java.io.Serializable;
-
-import backtype.storm.topology.FailedException;
-
-/**
- * The fewer committers a topology has, the more stable its state is.
- *
- * @author zhongyan.feng
- * @version
- */
-public interface ICommitter extends Serializable{
- /**
- * Begin to commit the batch identified by batchId, then return the commit result.
- * The commit result will be stored in external storage.
- *
- * If the commit fails, throw a FailedException.
- *
- * @param id
- */
- byte[] commit(BatchId id) throws FailedException;
-
- /**
- * Begin to revert the batch identified by batchId.
- *
- * If the current task fails to commit batchId, revert(batchId) will not be called.
- * If the current task fails to revert batchId, JStorm will not call revert again.
- *
- * @param id
- */
- void revert(BatchId id, byte[] commitResult);
-}
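As a rough, hedged illustration of the commit/revert contract documented in the deleted interface above (the in-memory map standing in for external storage is purely an assumption, not how JStorm stores results):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

import backtype.storm.topology.FailedException;
import com.alibaba.jstorm.batch.BatchId;
import com.alibaba.jstorm.batch.ICommitter;

public class InMemoryCommitter implements ICommitter {
    private static final long serialVersionUID = 1L;

    // Stand-in for external storage; a real committer would talk to a database or similar.
    private final Map<Long, byte[]> store = new ConcurrentHashMap<Long, byte[]>();

    @Override
    public byte[] commit(BatchId id) throws FailedException {
        byte[] result = Long.toString(id.getId()).getBytes();
        store.put(id.getId(), result);   // persist the commit result for this batch
        return result;                   // returned result is kept and handed back on revert
    }

    @Override
    public void revert(BatchId id, byte[] commitResult) {
        store.remove(id.getId());        // undo the batch if a later step fails
    }
}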
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPostCommit.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPostCommit.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPostCommit.java
deleted file mode 100644
index b408a3f..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPostCommit.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.alibaba.jstorm.batch;
-
-import backtype.storm.topology.BasicOutputCollector;
-
-
-public interface IPostCommit {
- /**
- * Called after the commit has completed.
- * Failures of postCommit are not tracked; they can be ignored.
- *
- * @param id
- */
- void postCommit(BatchId id, BasicOutputCollector collector);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPrepareCommit.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPrepareCommit.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPrepareCommit.java
deleted file mode 100644
index dd3da44..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/IPrepareCommit.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package com.alibaba.jstorm.batch;
-
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.FailedException;
-
-/**
- * Called before commit, after the batch has finished.
- *
- * @author zhongyan.feng
- */
-public interface IPrepareCommit {
-
- /**
- * Do the preparation work before commit.
- *
- * @param id
- * @param collector
- */
- void prepareCommit(BatchId id, BasicOutputCollector collector) throws FailedException;
-}
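In practice the prepare/post hooks are implemented by the same bolt that does the batch computation. A hedged sketch of both callbacks; the class name BatchLifecycleHooks and the emitted placeholder values are hypothetical and assume matching output fields are declared elsewhere:

    import backtype.storm.topology.BasicOutputCollector;
    import backtype.storm.topology.FailedException;
    import backtype.storm.tuple.Values;

    import com.alibaba.jstorm.batch.BatchId;
    import com.alibaba.jstorm.batch.IPostCommit;
    import com.alibaba.jstorm.batch.IPrepareCommit;

    // Hypothetical hooks: flush a per-batch aggregate before commit and
    // drop per-batch bookkeeping afterwards.
    public class BatchLifecycleHooks implements IPrepareCommit, IPostCommit {

        @Override
        public void prepareCommit(BatchId id, BasicOutputCollector collector) throws FailedException {
            // Emit the per-batch aggregate downstream before the commit phase starts;
            // throwing FailedException here fails the tuple and aborts the batch.
            collector.emit(new Values(id, 42L));
        }

        @Override
        public void postCommit(BatchId id, BasicOutputCollector collector) {
            // Best-effort cleanup: failures of postCommit are ignored by the framework.
        }
    }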
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutMsgId.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutMsgId.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutMsgId.java
deleted file mode 100644
index 63704fb..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutMsgId.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package com.alibaba.jstorm.batch.impl;
-
-import java.io.Serializable;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.util.BatchStatus;
-
-public class BatchSpoutMsgId implements Serializable{
-
- /** */
- private static final long serialVersionUID = 2899009971479957517L;
-
- private final BatchId batchId;
- private BatchStatus batchStatus;
-
- protected BatchSpoutMsgId(BatchId batchId, BatchStatus batchStatus) {
- this.batchId = batchId;
- this.batchStatus = batchStatus;
- }
-
- public static BatchSpoutMsgId mkInstance() {
- BatchId batchId = BatchId.mkInstance();
- BatchStatus batchStatus = BatchStatus.COMPUTING;
-
- return new BatchSpoutMsgId(batchId, batchStatus);
- }
-
-
- public BatchStatus getBatchStatus() {
- return batchStatus;
- }
-
- public void setBatchStatus(BatchStatus batchStatus) {
- this.batchStatus = batchStatus;
- }
-
- public BatchId getBatchId() {
- return batchId;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutTrigger.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutTrigger.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutTrigger.java
deleted file mode 100644
index d0c3e94..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/BatchSpoutTrigger.java
+++ /dev/null
@@ -1,312 +0,0 @@
-package com.alibaba.jstorm.batch.impl;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.util.BatchCommon;
-import com.alibaba.jstorm.batch.util.BatchDef;
-import com.alibaba.jstorm.batch.util.BatchStatus;
-import com.alibaba.jstorm.client.ConfigExtension;
-import com.alibaba.jstorm.cluster.ClusterState;
-import com.alibaba.jstorm.utils.IntervalCheck;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-/**
- * Strong Sequence
- *
- * @author zhongyan.feng
- * @version
- */
-public class BatchSpoutTrigger implements IRichSpout {
- /** */
- private static final long serialVersionUID = 7215109169247425954L;
-
- private static final Logger LOG = Logger.getLogger(BatchSpoutTrigger.class);
-
- private LinkedBlockingQueue<BatchSpoutMsgId> batchQueue;
-
- private transient ClusterState zkClient;
-
- private transient SpoutOutputCollector collector;
-
- private static final String ZK_NODE_PATH = "/trigger";
-
- private static BatchId currentBatchId = null;
-
- private Map conf;
-
- private String taskName;
-
- private IntervalCheck intervalCheck;
-
- /**
- * @throws Exception
- *
- */
- public void initMsgId() throws Exception {
- Long zkMsgId = null;
- byte[] data = zkClient.get_data(ZK_NODE_PATH, false);
- if (data != null) {
- String value = new String(data);
- try {
- zkMsgId = Long.valueOf(value);
- LOG.info("ZK msgId:" + zkMsgId);
- } catch (Exception e) {
- LOG.warn("Failed to get msgId ", e);
-
- }
-
- }
-
- if (zkMsgId != null) {
- BatchId.updateId(zkMsgId);
- }
-
- int max_spout_pending = JStormUtils.parseInt(
- conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING), 1);
-
- for (int i = 0; i < max_spout_pending; i++) {
- BatchSpoutMsgId msgId = BatchSpoutMsgId.mkInstance();
- if (currentBatchId == null) {
- currentBatchId = msgId.getBatchId();
- }
- batchQueue.offer(msgId);
- LOG.info("Push into queue," + msgId);
- }
-
- }
-
- @Override
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- batchQueue = new LinkedBlockingQueue<BatchSpoutMsgId>();
- this.collector = collector;
- this.conf = conf;
- taskName = context.getThisComponentId() + "_" + context.getThisTaskId();
-
- intervalCheck = new IntervalCheck();
-
- try {
- zkClient = BatchCommon.getZkClient(conf);
-
- initMsgId();
-
- } catch (Exception e) {
- LOG.error("", e);
- throw new RuntimeException("Failed to init");
- }
- LOG.info("Successfully open " + taskName);
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void activate() {
- LOG.info("Activate " + taskName);
- }
-
- @Override
- public void deactivate() {
- LOG.info("Deactivate " + taskName);
- }
-
- protected String getStreamId(BatchStatus batchStatus) {
- if (batchStatus == BatchStatus.COMPUTING) {
- return BatchDef.COMPUTING_STREAM_ID;
- } else if (batchStatus == BatchStatus.PREPARE_COMMIT) {
- return BatchDef.PREPARE_STREAM_ID;
- } else if (batchStatus == BatchStatus.COMMIT) {
- return BatchDef.COMMIT_STREAM_ID;
- } else if (batchStatus == BatchStatus.POST_COMMIT) {
- return BatchDef.POST_STREAM_ID;
- } else if (batchStatus == BatchStatus.REVERT_COMMIT) {
- return BatchDef.REVERT_STREAM_ID;
- } else {
- LOG.error("Occur unkonw type BatchStatus " + batchStatus);
- throw new RuntimeException();
- }
- }
-
- protected boolean isCommitStatus(BatchStatus batchStatus) {
- if (batchStatus == BatchStatus.COMMIT) {
- return true;
- } else if (batchStatus == BatchStatus.REVERT_COMMIT) {
- return true;
- } else {
- return false;
- }
- }
-
- protected boolean isCommitWait(BatchSpoutMsgId msgId) {
-
- if (isCommitStatus(msgId.getBatchStatus()) == false) {
- return false;
- }
-
- // left status is commit status
- if (currentBatchId.getId() >= msgId.getBatchId().getId()) {
- return false;
- }
-
- return true;
- }
-
- @Override
- public void nextTuple() {
- BatchSpoutMsgId msgId = null;
- try {
- msgId = batchQueue.poll(10, TimeUnit.MILLISECONDS);
- } catch (InterruptedException e) {
- LOG.error("", e);
- }
- if (msgId == null) {
- return;
- }
-
- if (isCommitWait(msgId)) {
-
- batchQueue.offer(msgId);
- if (intervalCheck.check()) {
- LOG.info("Current msgId " + msgId
- + ", but current commit BatchId is " + currentBatchId);
- }else {
- LOG.debug("Current msgId " + msgId
- + ", but current commit BatchId is " + currentBatchId);
- }
-
- return;
- }
-
- String streamId = getStreamId(msgId.getBatchStatus());
- List<Integer> outTasks = collector.emit(streamId,
- new Values(msgId.getBatchId()), msgId);
- if (outTasks.isEmpty()) {
- forward(msgId);
- }
- return;
-
- }
-
- protected void mkMsgId(BatchSpoutMsgId oldMsgId) {
- synchronized (BatchSpoutMsgId.class) {
- if (currentBatchId.getId() <= oldMsgId.getBatchId().getId()) {
- // this is normal case
-
- byte[] data = String.valueOf(currentBatchId.getId()).getBytes();
- try {
- zkClient.set_data(ZK_NODE_PATH, data);
- } catch (Exception e) {
- LOG.error("Failed to update to ZK " + oldMsgId, e);
- }
-
- currentBatchId = BatchId.incBatchId(oldMsgId.getBatchId());
-
- } else {
- // bigger batchId has been failed, when old msgId finish
- // it will go here
-
- }
-
- }
-
- BatchSpoutMsgId newMsgId = BatchSpoutMsgId.mkInstance();
- batchQueue.offer(newMsgId);
- StringBuilder sb = new StringBuilder();
- sb.append("Create new BatchId,");
- sb.append("old:").append(oldMsgId);
- sb.append("new:").append(newMsgId);
- sb.append("currentBatchId:").append(currentBatchId);
- LOG.info(sb.toString());
- }
-
- protected void forward(BatchSpoutMsgId msgId) {
- BatchStatus status = msgId.getBatchStatus();
-
- BatchStatus newStatus = status.forward();
- if (newStatus == null) {
- // create new status
- mkMsgId(msgId);
- LOG.info("Finish old batch " + msgId);
-
- } else {
- msgId.setBatchStatus(newStatus);
- batchQueue.offer(msgId);
- LOG.info("Forward batch " + msgId);
- }
- }
-
- @Override
- public void ack(Object msgId) {
- if (msgId instanceof BatchSpoutMsgId) {
- forward((BatchSpoutMsgId) msgId);
- return;
- } else {
- LOG.warn("Unknown type msgId " + msgId.getClass().getName() + ":"
- + msgId);
- return;
- }
- }
-
- protected void handleFail(BatchSpoutMsgId msgId) {
- LOG.info("Failed batch " + msgId);
- BatchStatus status = msgId.getBatchStatus();
-
- BatchStatus newStatus = status.error();
- if (newStatus == BatchStatus.ERROR) {
- // create new status
- mkMsgId(msgId);
-
- } else {
-
- msgId.setBatchStatus(newStatus);
- batchQueue.offer(msgId);
-
- }
- }
-
- @Override
- public void fail(Object msgId) {
- if (msgId instanceof BatchSpoutMsgId) {
- handleFail((BatchSpoutMsgId) msgId);
- } else {
- LOG.warn("Unknown type msgId " + msgId.getClass().getName() + ":"
- + msgId);
- return;
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(BatchDef.COMPUTING_STREAM_ID, new Fields(
- "BatchId"));
- declarer.declareStream(BatchDef.PREPARE_STREAM_ID,
- new Fields("BatchId"));
- declarer.declareStream(BatchDef.COMMIT_STREAM_ID, new Fields("BatchId"));
- declarer.declareStream(BatchDef.REVERT_STREAM_ID, new Fields("BatchId"));
- declarer.declareStream(BatchDef.POST_STREAM_ID, new Fields("BatchId"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Map<String, Object> map = new HashMap<String, Object>();
- ConfigExtension.setSpoutSingleThread(map, true);
- return map;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/CoordinatedBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/CoordinatedBolt.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/CoordinatedBolt.java
deleted file mode 100644
index 0f4720b..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/impl/CoordinatedBolt.java
+++ /dev/null
@@ -1,281 +0,0 @@
-package com.alibaba.jstorm.batch.impl;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.List;
-import java.util.Map;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.ReportedFailedException;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.TimeCacheMap;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.ICommitter;
-import com.alibaba.jstorm.batch.IPostCommit;
-import com.alibaba.jstorm.batch.IPrepareCommit;
-import com.alibaba.jstorm.batch.util.BatchCommon;
-import com.alibaba.jstorm.batch.util.BatchDef;
-import com.alibaba.jstorm.batch.util.BatchStatus;
-import com.alibaba.jstorm.cluster.ClusterState;
-
-public class CoordinatedBolt implements IRichBolt {
- private static final long serialVersionUID = 5720810158625748046L;
-
- public static Logger LOG = LoggerFactory.getLogger(CoordinatedBolt.class);
-
- private IBasicBolt delegate;
- private BasicOutputCollector basicCollector;
- private OutputCollector collector;
-
- private String taskId;
- private String taskName;
-
- private boolean isCommiter = false;
- private String zkCommitPath;
- private TimeCacheMap<Object, Object> commited;
-
- public CoordinatedBolt(IBasicBolt delegate) {
-
- this.delegate = delegate;
-
- }
-
- // use static variable to reduce zk connection
- private static ClusterState zkClient = null;
-
- public void mkCommitDir(Map conf) {
-
- try {
- zkClient = BatchCommon.getZkClient(conf);
-
- zkCommitPath = BatchDef.ZK_COMMIT_DIR + BatchDef.ZK_SEPERATOR
- + taskId;
- if (zkClient.node_existed(zkCommitPath, false)) {
- zkClient.delete_node(zkCommitPath);
- }
- zkClient.mkdirs(zkCommitPath);
-
- LOG.info(taskName + " successfully create commit path" + zkCommitPath);
- } catch (Exception e) {
- LOG.error("Failed to create zk node", e);
- throw new RuntimeException();
- }
- }
-
- public void prepare(Map conf, TopologyContext context,
- OutputCollector collector) {
-
- taskId = String.valueOf(context.getThisTaskId());
- taskName = context.getThisComponentId() + "_" + context.getThisTaskId();
-
- this.basicCollector = new BasicOutputCollector(collector);
- this.collector = collector;
-
- if (delegate instanceof ICommitter) {
- isCommiter = true;
- commited = new TimeCacheMap<Object, Object>(
- context.maxTopologyMessageTimeout());
- mkCommitDir(conf);
- }
-
- delegate.prepare(conf, context);
-
- }
-
- public void removeUseless(String path, int reserveSize) throws Exception {
- List<String> childs = zkClient.get_children(path, false);
- Collections.sort(childs, new Comparator<String>() {
-
- @Override
- public int compare(String o1, String o2) {
- try {
- Long v1 = Long.valueOf(o1);
- Long v2 = Long.valueOf(o2);
- return v1.compareTo(v2);
- }catch(Exception e) {
- return o1.compareTo(o2);
- }
-
- }
-
- });
-
- for (int index = 0; index < childs.size() - reserveSize; index++) {
- zkClient.delete_node(path + BatchDef.ZK_SEPERATOR
- + childs.get(index));
- }
- }
-
- public String getCommitPath(BatchId id) {
- return zkCommitPath + BatchDef.ZK_SEPERATOR + id.getId();
- }
-
- public void updateToZk(Object id, byte[] commitResult) {
- try {
-
- removeUseless(zkCommitPath, BatchDef.ZK_COMMIT_RESERVER_NUM);
-
- String path = getCommitPath((BatchId)id);
- byte[] data = commitResult;
- if (data == null) {
- data = new byte[0];
- }
- zkClient.set_data(path, data);
- LOG.info("Update " + path + " to zk");
- } catch (Exception e) {
- LOG.warn("Failed to update to zk,", e);
-
- }
-
- }
-
- public byte[] getCommittedData(Object id) {
- try {
- String path = getCommitPath((BatchId)id);
- byte[] data = zkClient.get_data(path, false);
-
- return data;
- } catch (Exception e) {
- LOG.error("Failed to visit ZK,", e);
- return null;
- }
- }
-
- public void handleRegular(Tuple tuple) {
- basicCollector.setContext(tuple);
- try {
- delegate.execute(tuple, basicCollector);
- collector.ack(tuple);
- } catch (FailedException e) {
- if (e instanceof ReportedFailedException) {
- collector.reportError(e);
- }
- collector.fail(tuple);
- }
-
- }
-
- public void handlePrepareCommit(Tuple tuple) {
- basicCollector.setContext(tuple);
- try {
- BatchId id = (BatchId) tuple.getValue(0);
- ((IPrepareCommit) delegate).prepareCommit(id, basicCollector);
- collector.ack(tuple);
- } catch (FailedException e) {
- if (e instanceof ReportedFailedException) {
- collector.reportError(e);
- }
- collector.fail(tuple);
- }
-
- }
-
- public void handleCommit(Tuple tuple) {
- Object id = tuple.getValue(0);
- try {
- byte[] commitResult = ((ICommitter) delegate).commit((BatchId) id);
-
- collector.ack(tuple);
-
- updateToZk(id, commitResult);
- commited.put(id, commitResult);
- } catch (Exception e) {
- LOG.error("Failed to commit ", e);
- collector.fail(tuple);
- }
- }
-
- public void handleRevert(Tuple tuple) {
- try {
- Object id = tuple.getValue(0);
- byte[] commitResult = null;
-
- if (commited.containsKey(id)) {
- commitResult = (byte[]) commited.get(id);
- } else {
- commitResult = getCommittedData(id);
- }
-
- if (commitResult != null) {
- ((ICommitter) delegate).revert((BatchId) id, commitResult);
- }
- } catch (Exception e) {
- LOG.error("Failed to revert,", e);
- }
-
- collector.ack(tuple);
- }
-
- public void handlePostCommit(Tuple tuple) {
-
- basicCollector.setContext(tuple);
- try {
- BatchId id = (BatchId) tuple.getValue(0);
- ((IPostCommit) delegate).postCommit(id, basicCollector);
-
- } catch (Exception e) {
- LOG.info("Failed to do postCommit,", e);
- }
- collector.ack(tuple);
- }
-
- public void execute(Tuple tuple) {
-
- BatchStatus batchStatus = getBatchStatus(tuple);
-
- if (batchStatus == BatchStatus.COMPUTING) {
- handleRegular(tuple);
- } else if (batchStatus == BatchStatus.PREPARE_COMMIT) {
- handlePrepareCommit(tuple);
- } else if (batchStatus == BatchStatus.COMMIT) {
- handleCommit(tuple);
- } else if (batchStatus == BatchStatus.REVERT_COMMIT) {
- handleRevert(tuple);
- } else if (batchStatus == BatchStatus.POST_COMMIT) {
- handlePostCommit(tuple);
- } else {
- throw new RuntimeException(
- "Receive commit tuple, but not committer");
- }
- }
-
- public void cleanup() {
- delegate.cleanup();
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- delegate.declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return delegate.getComponentConfiguration();
- }
-
- private BatchStatus getBatchStatus(Tuple tuple) {
- String streamId = tuple.getSourceStreamId();
-
- if (streamId.equals(BatchDef.PREPARE_STREAM_ID)) {
- return BatchStatus.PREPARE_COMMIT;
- } else if (streamId.equals(BatchDef.COMMIT_STREAM_ID)) {
- return BatchStatus.COMMIT;
- } else if (streamId.equals(BatchDef.REVERT_STREAM_ID)) {
- return BatchStatus.REVERT_COMMIT;
- } else if (streamId.equals(BatchDef.POST_STREAM_ID)) {
- return BatchStatus.POST_COMMIT;
- } else {
- return BatchStatus.COMPUTING;
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchCommon.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchCommon.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchCommon.java
deleted file mode 100644
index f99edfa..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchCommon.java
+++ /dev/null
@@ -1,65 +0,0 @@
-package com.alibaba.jstorm.batch.util;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-
-import com.alibaba.jstorm.cluster.ClusterState;
-import com.alibaba.jstorm.cluster.DistributedClusterState;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-public class BatchCommon {
- private static final Logger LOG = Logger.getLogger(BatchCommon.class);
-
- private static ClusterState zkClient = null;
-
- public static ClusterState getZkClient(Map conf) throws Exception {
- synchronized (BatchCommon.class) {
- if (zkClient != null) {
- return zkClient;
- }
-
- List<String> zkServers = null;
- if (conf.get(Config.TRANSACTIONAL_ZOOKEEPER_SERVERS) != null) {
- zkServers = (List<String>) conf
- .get(Config.TRANSACTIONAL_ZOOKEEPER_SERVERS);
- } else if (conf.get(Config.STORM_ZOOKEEPER_SERVERS) != null) {
- zkServers = (List<String>) conf
- .get(Config.STORM_ZOOKEEPER_SERVERS);
- } else {
- throw new RuntimeException("No setting zk");
- }
-
- int port = 2181;
- if (conf.get(Config.TRANSACTIONAL_ZOOKEEPER_PORT) != null) {
- port = JStormUtils.parseInt(
- conf.get(Config.TRANSACTIONAL_ZOOKEEPER_PORT), 2181);
- } else if (conf.get(Config.STORM_ZOOKEEPER_PORT) != null) {
- port = JStormUtils.parseInt(
- conf.get(Config.STORM_ZOOKEEPER_PORT), 2181);
- }
-
- String root = BatchDef.BATCH_ZK_ROOT;
- if (conf.get(Config.TRANSACTIONAL_ZOOKEEPER_ROOT) != null) {
- root = (String) conf.get(Config.TRANSACTIONAL_ZOOKEEPER_ROOT);
- }
-
- root = root + BatchDef.ZK_SEPERATOR
- + conf.get(Config.TOPOLOGY_NAME);
-
- Map<Object, Object> tmpConf = new HashMap<Object, Object>();
- tmpConf.putAll(conf);
- tmpConf.put(Config.STORM_ZOOKEEPER_SERVERS, zkServers);
- tmpConf.put(Config.STORM_ZOOKEEPER_ROOT, root);
- zkClient = new DistributedClusterState(tmpConf);
-
- LOG.info("Successfully connect ZK");
- return zkClient;
- }
-
- }
-}
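BatchCommon.getZkClient() resolves the ZooKeeper location from the transactional.* keys first and only falls back to the cluster-wide storm.zookeeper.* settings. A hedged sketch of overriding it for a single topology; the hostnames and root path are made up:

    import java.util.Arrays;

    import backtype.storm.Config;

    public class ZkOverrideSketch {
        public static Config build() {
            Config conf = new Config();
            // These keys, if present, take precedence over storm.zookeeper.*
            conf.put(Config.TRANSACTIONAL_ZOOKEEPER_SERVERS, Arrays.asList("zk1.example", "zk2.example"));
            conf.put(Config.TRANSACTIONAL_ZOOKEEPER_PORT, 2181);
            conf.put(Config.TRANSACTIONAL_ZOOKEEPER_ROOT, "/my-batch-root");
            return conf;
        }
    }

The topology name is appended to the root automatically, so different topologies sharing the same root do not collide.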
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchDef.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchDef.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchDef.java
deleted file mode 100644
index 7d87cfa..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchDef.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.alibaba.jstorm.batch.util;
-
-
-public class BatchDef {
- public static final String COMPUTING_STREAM_ID = "batch/compute-stream";
-
- public static final String PREPARE_STREAM_ID = "batch/parepare-stream";
-
- public static final String COMMIT_STREAM_ID = "batch/commit-stream";
-
- public static final String REVERT_STREAM_ID = "batch/revert-stream";
-
- public static final String POST_STREAM_ID = "batch/post-stream";
-
- public static final String SPOUT_TRIGGER = "spout_trigger";
-
- public static final String BATCH_ZK_ROOT = "batch";
-
- public static final String ZK_COMMIT_DIR = "/commit";
-
- public static final int ZK_COMMIT_RESERVER_NUM = 3;
-
- public static final String ZK_SEPERATOR = "/";
-
-
-}
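The stream IDs above are the control channels between BatchSpoutTrigger and each CoordinatedBolt. A hedged wiring sketch; the bolt name, parallelism, and NoopBolt placeholder are hypothetical, and JStorm's own batch topology builder may group these streams differently:

    import backtype.storm.topology.BasicOutputCollector;
    import backtype.storm.topology.FailedException;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.topology.base.BaseBasicBolt;
    import backtype.storm.tuple.Tuple;

    import com.alibaba.jstorm.batch.BatchId;
    import com.alibaba.jstorm.batch.ICommitter;
    import com.alibaba.jstorm.batch.impl.BatchSpoutTrigger;
    import com.alibaba.jstorm.batch.impl.CoordinatedBolt;
    import com.alibaba.jstorm.batch.util.BatchDef;

    public class BatchWiringSketch {

        // Placeholder user bolt; it must implement ICommitter because it is
        // subscribed to the commit/revert control streams below.
        public static class NoopBolt extends BaseBasicBolt implements ICommitter {
            @Override
            public void execute(Tuple input, BasicOutputCollector collector) {
            }

            @Override
            public void declareOutputFields(OutputFieldsDeclarer declarer) {
            }

            @Override
            public byte[] commit(BatchId id) throws FailedException {
                return new byte[0];
            }

            @Override
            public void revert(BatchId id, byte[] commitResult) {
            }
        }

        public static void main(String[] args) {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout(BatchDef.SPOUT_TRIGGER, new BatchSpoutTrigger(), 1);
            builder.setBolt("user-bolt", new CoordinatedBolt(new NoopBolt()), 2)
                   .allGrouping(BatchDef.SPOUT_TRIGGER, BatchDef.COMPUTING_STREAM_ID)
                   .allGrouping(BatchDef.SPOUT_TRIGGER, BatchDef.PREPARE_STREAM_ID)
                   .allGrouping(BatchDef.SPOUT_TRIGGER, BatchDef.COMMIT_STREAM_ID)
                   .allGrouping(BatchDef.SPOUT_TRIGGER, BatchDef.REVERT_STREAM_ID)
                   .allGrouping(BatchDef.SPOUT_TRIGGER, BatchDef.POST_STREAM_ID);
            // builder.createTopology() would then be submitted as usual.
        }
    }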
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchStatus.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchStatus.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchStatus.java
deleted file mode 100644
index e02daeb..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/batch/util/BatchStatus.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package com.alibaba.jstorm.batch.util;
-
-public enum BatchStatus {
- COMPUTING,
- PREPARE_COMMIT,
- COMMIT,
- REVERT_COMMIT,
- POST_COMMIT,
- ERROR;
-
-
-
- public BatchStatus forward() {
- if (this == COMPUTING) {
- return PREPARE_COMMIT;
- }else if (this == PREPARE_COMMIT) {
- return COMMIT;
- }else if (this == COMMIT) {
- return POST_COMMIT;
- }else {
- return null;
- }
- }
-
- public BatchStatus error() {
- if (this == COMMIT) {
- return REVERT_COMMIT;
- }else {
- return ERROR;
- }
- }
-
-};
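The enum above is effectively a small state machine: forward() advances the happy path and error() picks the recovery path. A hypothetical walk-through (the demo class name is illustrative):

    import com.alibaba.jstorm.batch.util.BatchStatus;

    public class BatchStatusDemo {
        public static void main(String[] args) {
            // Happy path: COMPUTING -> PREPARE_COMMIT -> COMMIT -> POST_COMMIT -> (null = finished)
            BatchStatus status = BatchStatus.COMPUTING;
            while (status != null) {
                System.out.println(status);
                status = status.forward();
            }

            // Failure path: only a failed COMMIT is revertable; any other
            // failed phase terminates the batch with ERROR.
            System.out.println(BatchStatus.COMMIT.error());     // REVERT_COMMIT
            System.out.println(BatchStatus.COMPUTING.error());  // ERROR
        }
    }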
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopDefaultKill.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopDefaultKill.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopDefaultKill.java
deleted file mode 100644
index f392f22..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopDefaultKill.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-
-/**
- * Killer callback
- *
- * @author yannian
- *
- */
-
-public class AsyncLoopDefaultKill extends RunnableCallback {
-
- @Override
- public <T> Object execute(T... args) {
- Exception e = (Exception) args[0];
- JStormUtils.halt_process(1, "Async loop died!");
- return e;
- }
-
- @Override
- public void run() {
- JStormUtils.halt_process(1, "Async loop died!");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopRunnable.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopRunnable.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopRunnable.java
deleted file mode 100644
index 1b1e588..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopRunnable.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import org.apache.log4j.Logger;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-
-/**
- * AsyncLoopThread 's runnable
- *
- * This class wraps the RunnableCallback fn; if an exception occurs, killfn is run.
- *
- * @author yannian
- *
- */
-public class AsyncLoopRunnable implements Runnable {
- private static Logger LOG = Logger.getLogger(AsyncLoopRunnable.class);
-
- private RunnableCallback fn;
- private RunnableCallback killfn;
-
- public AsyncLoopRunnable(RunnableCallback fn, RunnableCallback killfn) {
- this.fn = fn;
- this.killfn = killfn;
- }
-
- private boolean needQuit(Object rtn) {
- if (rtn != null) {
- long sleepTime = Long.parseLong(String.valueOf(rtn));
- if (sleepTime < 0) {
- return true;
- }else if (sleepTime > 0) {
- JStormUtils.sleepMs(sleepTime * 1000);
- }
- }
- return false;
- }
-
- @Override
- public void run() {
-
- try {
- while (true) {
- Exception e = null;
-
- try {
- if (fn == null) {
- LOG.warn("fn==null");
- throw new RuntimeException("AsyncLoopRunnable no core function ");
- }
-
- fn.run();
-
- e = fn.error();
-
- } catch (Exception ex) {
- e = ex;
- }
- if (e != null) {
- fn.shutdown();
- throw e;
- }
- Object rtn = fn.getResult();
- if (this.needQuit(rtn)) {
- return;
- }
-
- }
- } catch (InterruptedException e) {
- LOG.info("Async loop interrupted!");
- } catch (Throwable e) {
- Object rtn = fn.getResult();
- if (this.needQuit(rtn)) {
- return;
- }else {
- LOG.error("Async loop died!", e);
- killfn.execute(e);
- }
- }
-
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopThread.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopThread.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopThread.java
deleted file mode 100644
index 534386d..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/AsyncLoopThread.java
+++ /dev/null
@@ -1,122 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import java.lang.Thread.UncaughtExceptionHandler;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.utils.Time;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.jstorm.utils.SmartThread;
-
-/**
- * Wrapper timer thread: executes afn periodically; if something goes wrong,
- * kill_fn is run.
- *
- *
- * @author yannian
- *
- */
-public class AsyncLoopThread implements SmartThread {
- private static final Logger LOG = Logger.getLogger(AsyncLoopThread.class);
-
- private Thread thread;
-
- private RunnableCallback afn;
-
- public AsyncLoopThread(RunnableCallback afn) {
- this.init(afn, false, Thread.NORM_PRIORITY, true);
- }
-
- public AsyncLoopThread(RunnableCallback afn, boolean daemon, int priority,
- boolean start) {
- this.init(afn, daemon, priority, start);
- }
-
- public AsyncLoopThread(RunnableCallback afn, boolean daemon,
- RunnableCallback kill_fn, int priority, boolean start) {
- this.init(afn, daemon, kill_fn, priority, start);
- }
-
- public void init(RunnableCallback afn, boolean daemon, int priority,
- boolean start) {
- RunnableCallback kill_fn = new AsyncLoopDefaultKill();
- this.init(afn, daemon, kill_fn, priority, start);
- }
-
- /**
- *
- * @param afn
- * @param daemon
- * @param kill_fn
- * (Exception e)
- * @param priority
- * @param args_fn
- * @param start
- */
- private void init(RunnableCallback afn, boolean daemon,
- RunnableCallback kill_fn, int priority, boolean start) {
- if (kill_fn == null) {
- kill_fn = new AsyncLoopDefaultKill();
- }
-
- Runnable runable = new AsyncLoopRunnable(afn, kill_fn);
- thread = new Thread(runable);
- String threadName = afn.getThreadName();
- if (threadName == null) {
- threadName = afn.getClass().getSimpleName();
- }
- thread.setName(threadName);
- thread.setDaemon(daemon);
- thread.setPriority(priority);
- thread.setUncaughtExceptionHandler(new UncaughtExceptionHandler() {
- @Override
- public void uncaughtException(Thread t, Throwable e) {
- LOG.error("UncaughtException", e);
- JStormUtils.halt_process(1, "UncaughtException");
- }
- });
-
- this.afn = afn;
-
- if (start) {
- thread.start();
- }
-
- }
-
- @Override
- public void start() {
- thread.start();
- }
-
- @Override
- public void join() throws InterruptedException {
- thread.join();
- }
-
- // for test
- public void join(int times) throws InterruptedException {
- thread.join(times);
- }
-
- @Override
- public void interrupt() {
- thread.interrupt();
- }
-
- @Override
- public Boolean isSleeping() {
- return Time.isThreadWaiting(thread);
- }
-
- public Thread getThread() {
- return thread;
- }
-
- @Override
- public void cleanup() {
- // TODO Auto-generated method stub
- afn.cleanup();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/BaseCallback.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/BaseCallback.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/BaseCallback.java
deleted file mode 100644
index 31012fa..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/BaseCallback.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import com.alibaba.jstorm.callback.Callback;
-
-public class BaseCallback implements Callback {
-
- @Override
- public <T> Object execute(T... args) {
- // TODO Auto-generated method stub
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/Callback.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/Callback.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/Callback.java
deleted file mode 100644
index d832a71..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/Callback.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-/**
- * Callback interface
- *
- * @author lixin 2012-3-12
- *
- */
-public interface Callback {
-
- public <T> Object execute(T... args);
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/ClusterStateCallback.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/ClusterStateCallback.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/ClusterStateCallback.java
deleted file mode 100644
index 2726b1d..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/ClusterStateCallback.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import com.alibaba.jstorm.callback.BaseCallback;
-
-public class ClusterStateCallback extends BaseCallback {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/DefaultWatcherCallBack.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/DefaultWatcherCallBack.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/DefaultWatcherCallBack.java
deleted file mode 100644
index bd4ce25..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/DefaultWatcherCallBack.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import org.apache.log4j.Logger;
-import org.apache.zookeeper.Watcher.Event.EventType;
-import org.apache.zookeeper.Watcher.Event.KeeperState;
-
-import com.alibaba.jstorm.zk.ZkEventTypes;
-import com.alibaba.jstorm.zk.ZkKeeperStates;
-
-/**
- * Default ZK watch callback
- *
- * @author yannian
- *
- */
-public class DefaultWatcherCallBack implements WatcherCallBack {
-
- private static Logger LOG = Logger.getLogger(DefaultWatcherCallBack.class);
-
- @Override
- public void execute(KeeperState state, EventType type, String path) {
- LOG.info("Zookeeper state update:" + ZkKeeperStates.getStateName(state)
- + "," + ZkEventTypes.getStateName(type) + "," + path);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/RunnableCallback.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/RunnableCallback.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/RunnableCallback.java
deleted file mode 100644
index ccee6e2..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/RunnableCallback.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-
-/**
- * Base Runnable/Callback function
- *
- * @author yannian
- *
- */
-public class RunnableCallback implements Runnable, Callback {
-
- @Override
- public <T> Object execute(T... args) {
- return null;
- }
-
- @Override
- public void run() {
-
- }
-
- public Exception error() {
- return null;
- }
-
- public Object getResult() {
- return null;
- }
-
- /**
- * Called by exception
- */
- public void shutdown() {
- }
-
- /**
- * Normal quit
- */
- public void cleanup() {
-
- }
-
- public String getThreadName() {
- return null;
- }
-
-}
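Taken together with AsyncLoopThread and AsyncLoopRunnable above, a callback's getResult() controls the loop: a positive value is treated as seconds to sleep between iterations, a negative value stops the loop, and an exception hands control to the kill callback. A hypothetical periodic task (all names and values are illustrative):

    import org.apache.log4j.Logger;

    import com.alibaba.jstorm.callback.AsyncLoopThread;
    import com.alibaba.jstorm.callback.RunnableCallback;

    public class HeartbeatLoop extends RunnableCallback {
        private static final Logger LOG = Logger.getLogger(HeartbeatLoop.class);

        private int rounds = 0;

        @Override
        public void run() {
            LOG.info("send heartbeat");
        }

        @Override
        public Object getResult() {
            // Sleep 5 seconds between rounds; a negative value quits after 3 rounds.
            return ++rounds < 3 ? 5 : -1;
        }

        @Override
        public String getThreadName() {
            return "heartbeat-loop";
        }

        public static void main(String[] args) throws InterruptedException {
            AsyncLoopThread loop = new AsyncLoopThread(new HeartbeatLoop());
            loop.join(); // returns once getResult() goes negative
        }
    }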
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/WatcherCallBack.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/WatcherCallBack.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/WatcherCallBack.java
deleted file mode 100644
index 382cc4b..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/callback/WatcherCallBack.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package com.alibaba.jstorm.callback;
-
-import org.apache.zookeeper.Watcher.Event.EventType;
-import org.apache.zookeeper.Watcher.Event.KeeperState;
-
-public interface WatcherCallBack {
- public void execute(KeeperState state, EventType type, String path);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/ConfigExtension.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/ConfigExtension.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/ConfigExtension.java
deleted file mode 100644
index f71a4ce..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/ConfigExtension.java
+++ /dev/null
@@ -1,642 +0,0 @@
-package com.alibaba.jstorm.client;
-
-import java.security.InvalidParameterException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-
-import backtype.storm.Config;
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-
-public class ConfigExtension {
- /**
- * If this configuration is set, the spout or bolt will log all received
- * tuples.
- *
- * topology.debug only logs all sent tuples.
- */
- protected static final String TOPOLOGY_DEBUG_RECV_TUPLE = "topology.debug.recv.tuple";
-
- public static void setTopologyDebugRecvTuple(Map conf, boolean debug) {
- conf.put(TOPOLOGY_DEBUG_RECV_TUPLE, Boolean.valueOf(debug));
- }
-
- public static Boolean isTopologyDebugRecvTuple(Map conf) {
- return JStormUtils.parseBoolean(conf.get(TOPOLOGY_DEBUG_RECV_TUPLE),
- false);
- }
-
- /**
- * Port number of the daemon HTTP server.
- */
- private static final Integer DEFAULT_DEAMON_HTTPSERVER_PORT = 7621;
-
- protected static final String SUPERVISOR_DEAMON_HTTPSERVER_PORT = "supervisor.deamon.logview.port";
-
- public static Integer getSupervisorDeamonHttpserverPort(Map conf) {
- return JStormUtils.parseInt(
- conf.get(SUPERVISOR_DEAMON_HTTPSERVER_PORT),
- DEFAULT_DEAMON_HTTPSERVER_PORT + 1);
- }
-
- protected static final String NIMBUS_DEAMON_HTTPSERVER_PORT = "nimbus.deamon.logview.port";
-
- public static Integer getNimbusDeamonHttpserverPort(Map conf) {
- return JStormUtils.parseInt(conf.get(NIMBUS_DEAMON_HTTPSERVER_PORT),
- DEFAULT_DEAMON_HTTPSERVER_PORT);
- }
-
- /**
- * Worker gc parameter
- *
- *
- */
- protected static final String WORKER_GC_CHILDOPTS = "worker.gc.childopts";
-
- public static void setWorkerGc(Map conf, String gc) {
- conf.put(WORKER_GC_CHILDOPTS, gc);
- }
-
- public static String getWorkerGc(Map conf) {
- return (String) conf.get(WORKER_GC_CHILDOPTS);
- }
-
- protected static final String WOREKER_REDIRECT_OUTPUT = "worker.redirect.output";
-
- public static boolean getWorkerRedirectOutput(Map conf) {
- Object result = conf.get(WOREKER_REDIRECT_OUTPUT);
- if (result == null)
- return true;
- return (Boolean) result;
- }
-
- protected static final String WOREKER_REDIRECT_OUTPUT_FILE = "worker.redirect.output.file";
-
- public static void setWorkerRedirectOutputFile(Map conf, String outputPath) {
- conf.put(WOREKER_REDIRECT_OUTPUT_FILE, outputPath);
- }
-
- public static String getWorkerRedirectOutputFile(Map conf) {
- return (String)conf.get(WOREKER_REDIRECT_OUTPUT_FILE);
- }
-
- /**
- * Usually a spout finishes preparation before the bolts do, so the spout
- * needs to wait several seconds until the bolts finish their preparation.
- *
- * By default, the setting is 30 seconds.
- */
- protected static final String SPOUT_DELAY_RUN = "spout.delay.run";
-
- public static void setSpoutDelayRunSeconds(Map conf, int delay) {
- conf.put(SPOUT_DELAY_RUN, Integer.valueOf(delay));
- }
-
- public static int getSpoutDelayRunSeconds(Map conf) {
- return JStormUtils.parseInt(conf.get(SPOUT_DELAY_RUN), 30);
- }
-
- /**
- * Default ZMQ Pending queue size
- */
- public static final int DEFAULT_ZMQ_MAX_QUEUE_MSG = 1000;
-
- /**
- * How many memory slots one task will allocate; the default is 1.
- */
- protected static final String MEM_SLOTS_PER_TASK = "memory.slots.per.task";
-
- @Deprecated
- public static void setMemSlotPerTask(Map conf, int slotNum) {
- if (slotNum < 1) {
- throw new InvalidParameterException();
- }
- conf.put(MEM_SLOTS_PER_TASK, Integer.valueOf(slotNum));
- }
-
- /**
- * How many CPU slots one task will use; the default is 1.
- */
- protected static final String CPU_SLOTS_PER_TASK = "cpu.slots.per.task";
-
- @Deprecated
- public static void setCpuSlotsPerTask(Map conf, int slotNum) {
- if (slotNum < 1) {
- throw new InvalidParameterException();
- }
- conf.put(CPU_SLOTS_PER_TASK, Integer.valueOf(slotNum));
- }
-
- /**
- * If this setting is enabled, the component's tasks must run on different nodes.
- * This conflicts with USE_SINGLE_NODE.
- */
- protected static final String TASK_ON_DIFFERENT_NODE = "task.on.differ.node";
-
- public static void setTaskOnDifferentNode(Map conf, boolean isIsolate) {
- conf.put(TASK_ON_DIFFERENT_NODE, Boolean.valueOf(isIsolate));
- }
-
- public static boolean isTaskOnDifferentNode(Map conf) {
- return JStormUtils
- .parseBoolean(conf.get(TASK_ON_DIFFERENT_NODE), false);
- }
-
- protected static final String SUPERVISOR_ENABLE_CGROUP = "supervisor.enable.cgroup";
-
- public static boolean isEnableCgroup(Map conf) {
- return JStormUtils.parseBoolean(conf.get(SUPERVISOR_ENABLE_CGROUP),
- false);
- }
-
- /**
- * If the component or topology configuration sets "use.old.assignment",
- * the old assignment will be tried first.
- */
- protected static final String USE_OLD_ASSIGNMENT = "use.old.assignment";
-
- public static void setUseOldAssignment(Map conf, boolean useOld) {
- conf.put(USE_OLD_ASSIGNMENT, Boolean.valueOf(useOld));
- }
-
- public static boolean isUseOldAssignment(Map conf) {
- return JStormUtils.parseBoolean(conf.get(USE_OLD_ASSIGNMENT), false);
- }
-
- /**
- * The supervisor's hostname
- */
- protected static final String SUPERVISOR_HOSTNAME = "supervisor.hostname";
- public static final Object SUPERVISOR_HOSTNAME_SCHEMA = String.class;
-
- public static String getSupervisorHost(Map conf) {
- return (String) conf.get(SUPERVISOR_HOSTNAME);
- }
-
- protected static final String SUPERVISOR_USE_IP = "supervisor.use.ip";
-
- public static boolean isSupervisorUseIp(Map conf) {
- return JStormUtils.parseBoolean(conf.get(SUPERVISOR_USE_IP), false);
- }
-
- protected static final String NIMBUS_USE_IP = "nimbus.use.ip";
-
- public static boolean isNimbusUseIp(Map conf) {
- return JStormUtils.parseBoolean(conf.get(NIMBUS_USE_IP), false);
- }
-
- protected static final String TOPOLOGY_ENABLE_CLASSLOADER = "topology.enable.classloader";
-
- public static boolean isEnableTopologyClassLoader(Map conf) {
- return JStormUtils.parseBoolean(conf.get(TOPOLOGY_ENABLE_CLASSLOADER),
- false);
- }
-
- public static void setEnableTopologyClassLoader(Map conf, boolean enable) {
- conf.put(TOPOLOGY_ENABLE_CLASSLOADER, Boolean.valueOf(enable));
- }
-
- protected static String CLASSLOADER_DEBUG = "classloader.debug";
-
- public static boolean isEnableClassloaderDebug(Map conf) {
- return JStormUtils.parseBoolean(conf.get(CLASSLOADER_DEBUG), false);
- }
-
- public static void setEnableClassloaderDebug(Map conf, boolean enable) {
- conf.put(CLASSLOADER_DEBUG, enable);
- }
-
- protected static final String CONTAINER_NIMBUS_HEARTBEAT = "container.nimbus.heartbeat";
-
- /**
- * Determine whether nimbus runs inside an Apsara/Yarn container.
- *
- * @param conf
- * @return
- */
- public static boolean isEnableContainerNimbus() {
- String path = System.getenv(CONTAINER_NIMBUS_HEARTBEAT);
-
- if (StringUtils.isBlank(path)) {
- return false;
- } else {
- return true;
- }
- }
-
- /**
- * Get the Apsara/Yarn nimbus container's heartbeat dir.
- *
- * @param conf
- * @return
- */
- public static String getContainerNimbusHearbeat() {
- return System.getenv(CONTAINER_NIMBUS_HEARTBEAT);
- }
-
- protected static final String CONTAINER_SUPERVISOR_HEARTBEAT = "container.supervisor.heartbeat";
-
- /**
- * Determine whether the supervisor runs inside an Apsara/Yarn supervisor
- * container.
- *
- * @param conf
- * @return
- */
- public static boolean isEnableContainerSupervisor() {
- String path = System.getenv(CONTAINER_SUPERVISOR_HEARTBEAT);
-
- if (StringUtils.isBlank(path)) {
- return false;
- } else {
- return true;
- }
- }
-
- /**
- * Get the Apsara/Yarn supervisor container's heartbeat dir.
- *
- * @param conf
- * @return
- */
- public static String getContainerSupervisorHearbeat() {
- return (String) System.getenv(CONTAINER_SUPERVISOR_HEARTBEAT);
- }
-
- protected static final String CONTAINER_HEARTBEAT_TIMEOUT_SECONDS = "container.heartbeat.timeout.seconds";
-
- public static int getContainerHeartbeatTimeoutSeconds(Map conf) {
- return JStormUtils.parseInt(
- conf.get(CONTAINER_HEARTBEAT_TIMEOUT_SECONDS), 240);
- }
-
- protected static final String CONTAINER_HEARTBEAT_FREQUENCE = "container.heartbeat.frequence";
-
- public static int getContainerHeartbeatFrequence(Map conf) {
- return JStormUtils
- .parseInt(conf.get(CONTAINER_HEARTBEAT_FREQUENCE), 10);
- }
-
- protected static final String JAVA_SANDBOX_ENABLE = "java.sandbox.enable";
-
- public static boolean isJavaSandBoxEnable(Map conf) {
- return JStormUtils.parseBoolean(conf.get(JAVA_SANDBOX_ENABLE), false);
- }
-
- protected static String SPOUT_SINGLE_THREAD = "spout.single.thread";
-
- public static boolean isSpoutSingleThread(Map conf) {
- return JStormUtils.parseBoolean(conf.get(SPOUT_SINGLE_THREAD), false);
- }
-
- public static void setSpoutSingleThread(Map conf, boolean enable) {
- conf.put(SPOUT_SINGLE_THREAD, enable);
- }
-
- protected static String WORKER_STOP_WITHOUT_SUPERVISOR = "worker.stop.without.supervisor";
-
- public static boolean isWorkerStopWithoutSupervisor(Map conf) {
- return JStormUtils.parseBoolean(
- conf.get(WORKER_STOP_WITHOUT_SUPERVISOR), false);
- }
-
- protected static String CGROUP_ROOT_DIR = "supervisor.cgroup.rootdir";
-
- public static String getCgroupRootDir(Map conf) {
- return (String) conf.get(CGROUP_ROOT_DIR);
- }
-
- protected static String NETTY_TRANSFER_ASYNC_AND_BATCH = "storm.messaging.netty.transfer.async.batch";
-
- public static boolean isNettyTransferAsyncBatch(Map conf) {
- return JStormUtils.parseBoolean(
- conf.get(NETTY_TRANSFER_ASYNC_AND_BATCH), true);
- }
-
- protected static final String USE_USERDEFINE_ASSIGNMENT = "use.userdefine.assignment";
-
- public static void setUserDefineAssignment(Map conf,
- List<WorkerAssignment> userDefines) {
- List<String> ret = new ArrayList<String>();
- for (WorkerAssignment worker : userDefines) {
- ret.add(Utils.to_json(worker));
- }
- conf.put(USE_USERDEFINE_ASSIGNMENT, ret);
- }
-
- public static List<WorkerAssignment> getUserDefineAssignment(Map conf) {
- List<WorkerAssignment> ret = new ArrayList<WorkerAssignment>();
- if (conf.get(USE_USERDEFINE_ASSIGNMENT) == null)
- return ret;
- for (String worker : (List<String>) conf.get(USE_USERDEFINE_ASSIGNMENT)) {
- ret.add(WorkerAssignment.parseFromObj(Utils.from_json(worker)));
- }
- return ret;
- }
-
- protected static final String MEMSIZE_PER_WORKER = "worker.memory.size";
-
- public static void setMemSizePerWorker(Map conf, long memSize) {
- conf.put(MEMSIZE_PER_WORKER, memSize);
- }
-
- public static void setMemSizePerWorkerByKB(Map conf, long memSize) {
- long size = memSize * 1024l;
- setMemSizePerWorker(conf, size);
- }
-
- public static void setMemSizePerWorkerByMB(Map conf, long memSize) {
- long size = memSize * 1024l;
- setMemSizePerWorkerByKB(conf, size);
- }
-
- public static void setMemSizePerWorkerByGB(Map conf, long memSize) {
- long size = memSize * 1024l;
- setMemSizePerWorkerByMB(conf, size);
- }
-
- public static long getMemSizePerWorker(Map conf) {
- long size = JStormUtils.parseLong(conf.get(MEMSIZE_PER_WORKER),
- JStormUtils.SIZE_1_G * 2);
- return size > 0 ? size : JStormUtils.SIZE_1_G * 2;
- }
-
- protected static final String CPU_SLOT_PER_WORKER = "worker.cpu.slot.num";
-
- public static void setCpuSlotNumPerWorker(Map conf, int slotNum) {
- conf.put(CPU_SLOT_PER_WORKER, slotNum);
- }
-
- public static int getCpuSlotPerWorker(Map conf) {
- int slot = JStormUtils.parseInt(conf.get(CPU_SLOT_PER_WORKER), 1);
- return slot > 0 ? slot : 1;
- }
-
- protected static String TOPOLOGY_PERFORMANCE_METRICS = "topology.performance.metrics";
-
- public static boolean isEnablePerformanceMetrics(Map conf) {
- return JStormUtils.parseBoolean(conf.get(TOPOLOGY_PERFORMANCE_METRICS),
- true);
- }
-
- public static void setPerformanceMetrics(Map conf, boolean isEnable) {
- conf.put(TOPOLOGY_PERFORMANCE_METRICS, isEnable);
- }
-
- protected static String NETTY_BUFFER_THRESHOLD_SIZE = "storm.messaging.netty.buffer.threshold";
-
- public static long getNettyBufferThresholdSize(Map conf) {
- return JStormUtils.parseLong(conf.get(NETTY_BUFFER_THRESHOLD_SIZE),
- 8 *JStormUtils.SIZE_1_M);
- }
-
- public static void setNettyBufferThresholdSize(Map conf, long size) {
- conf.put(NETTY_BUFFER_THRESHOLD_SIZE, size);
- }
-
- protected static String NETTY_MAX_SEND_PENDING = "storm.messaging.netty.max.pending";
-
- public static void setNettyMaxSendPending(Map conf, long pending) {
- conf.put(NETTY_MAX_SEND_PENDING, pending);
- }
-
- public static long getNettyMaxSendPending(Map conf) {
- return JStormUtils.parseLong(conf.get(NETTY_MAX_SEND_PENDING), 16);
- }
-
- protected static String DISRUPTOR_USE_SLEEP = "disruptor.use.sleep";
-
- public static boolean isDisruptorUseSleep(Map conf) {
- return JStormUtils.parseBoolean(conf.get(DISRUPTOR_USE_SLEEP), true);
- }
-
- public static void setDisruptorUseSleep(Map conf, boolean useSleep) {
- conf.put(DISRUPTOR_USE_SLEEP, useSleep);
- }
-
- public static boolean isTopologyContainAcker(Map conf) {
- int num = JStormUtils.parseInt(conf.get(Config.TOPOLOGY_ACKER_EXECUTORS), 1);
- if (num > 0) {
- return true;
- }else {
- return false;
- }
- }
-
- protected static String NETTY_SYNC_MODE = "storm.messaging.netty.sync.mode";
-
- public static boolean isNettySyncMode(Map conf) {
- return JStormUtils.parseBoolean(conf.get(NETTY_SYNC_MODE), false);
- }
-
- public static void setNettySyncMode(Map conf, boolean sync) {
- conf.put(NETTY_SYNC_MODE, sync);
- }
-
- protected static String NETTY_ASYNC_BLOCK = "storm.messaging.netty.async.block";
- public static boolean isNettyASyncBlock(Map conf) {
- return JStormUtils.parseBoolean(conf.get(NETTY_ASYNC_BLOCK), true);
- }
-
- public static void setNettyASyncBlock(Map conf, boolean block) {
- conf.put(NETTY_ASYNC_BLOCK, block);
- }
-
- protected static String ALIMONITOR_METRICS_POST = "topology.alimonitor.metrics.post";
-
- public static boolean isAlimonitorMetricsPost(Map conf) {
- return JStormUtils.parseBoolean(conf.get(ALIMONITOR_METRICS_POST), true);
- }
-
- public static void setAlimonitorMetricsPost(Map conf, boolean post) {
- conf.put(ALIMONITOR_METRICS_POST, post);
- }
-
- protected static String TASK_CLEANUP_TIMEOUT_SEC = "task.cleanup.timeout.sec";
-
- public static int getTaskCleanupTimeoutSec(Map conf) {
- return JStormUtils.parseInt(conf.get(TASK_CLEANUP_TIMEOUT_SEC), 10);
- }
-
- public static void setTaskCleanupTimeoutSec(Map conf, int timeout) {
- conf.put(TASK_CLEANUP_TIMEOUT_SEC, timeout);
- }
-
- protected static String UI_CLUSTERS = "ui.clusters";
- protected static String UI_CLUSTER_NAME = "name";
- protected static String UI_CLUSTER_ZK_ROOT = "zkRoot";
- protected static String UI_CLUSTER_ZK_SERVERS = "zkServers";
- protected static String UI_CLUSTER_ZK_PORT = "zkPort";
-
- public static List<Map> getUiClusters(Map conf) {
- return (List<Map>) conf.get(UI_CLUSTERS);
- }
-
- public static void setUiClusters(Map conf, List<Map> uiClusters) {
- conf.put(UI_CLUSTERS, uiClusters);
- }
-
- public static Map getUiClusterInfo(List<Map> uiClusters, String name) {
- Map ret = null;
- for (Map cluster : uiClusters) {
- String clusterName = getUiClusterName(cluster);
- if (clusterName.equals(name)) {
- ret = cluster;
- break;
- }
- }
-
- return ret;
- }
-
- public static String getUiClusterName(Map uiCluster) {
- return (String) uiCluster.get(UI_CLUSTER_NAME);
- }
-
- public static String getUiClusterZkRoot(Map uiCluster) {
- return (String) uiCluster.get(UI_CLUSTER_ZK_ROOT);
- }
-
- public static List<String> getUiClusterZkServers(Map uiCluster) {
- return (List<String>) uiCluster.get(UI_CLUSTER_ZK_SERVERS);
- }
-
- public static Integer getUiClusterZkPort(Map uiCluster) {
- return JStormUtils.parseInt(uiCluster.get(UI_CLUSTER_ZK_PORT));
- }
-
- protected static String SPOUT_PEND_FULL_SLEEP = "spout.pending.full.sleep";
-
- public static boolean isSpoutPendFullSleep(Map conf) {
- return JStormUtils.parseBoolean(conf.get(SPOUT_PEND_FULL_SLEEP), false);
- }
-
- public static void setSpoutPendFullSleep(Map conf, boolean sleep) {
- conf.put(SPOUT_PEND_FULL_SLEEP, sleep);
-
- }
-
- protected static String LOGVIEW_ENCODING = "supervisor.deamon.logview.encoding";
- protected static String UTF8 = "utf-8";
-
- public static String getLogViewEncoding(Map conf) {
- String ret = (String) conf.get(LOGVIEW_ENCODING);
- if (ret == null) ret = UTF8;
- return ret;
- }
-
- public static void setLogViewEncoding(Map conf, String enc) {
- conf.put(LOGVIEW_ENCODING, enc);
- }
-
- public static String TASK_STATUS_ACTIVE = "Active";
- public static String TASK_STATUS_STARTING = "Starting";
-
- protected static String ALIMONITOR_TOPO_METIRC_NAME = "topology.alimonitor.topo.metrics.name";
- protected static String ALIMONITOR_TASK_METIRC_NAME = "topology.alimonitor.task.metrics.name";
- protected static String ALIMONITOR_WORKER_METIRC_NAME = "topology.alimonitor.worker.metrics.name";
- protected static String ALIMONITOR_USER_METIRC_NAME = "topology.alimonitor.user.metrics.name";
-
- public static String getAlmonTopoMetricName(Map conf) {
- return (String) conf.get(ALIMONITOR_TOPO_METIRC_NAME);
- }
-
- public static String getAlmonTaskMetricName(Map conf) {
- return (String) conf.get(ALIMONITOR_TASK_METIRC_NAME);
- }
-
- public static String getAlmonWorkerMetricName(Map conf) {
- return (String) conf.get(ALIMONITOR_WORKER_METIRC_NAME);
- }
-
- public static String getAlmonUserMetricName(Map conf) {
- return (String) conf.get(ALIMONITOR_USER_METIRC_NAME);
- }
-
- protected static String SPOUT_PARALLELISM = "topology.spout.parallelism";
- protected static String BOLT_PARALLELISM = "topology.bolt.parallelism";
-
- public static Integer getSpoutParallelism(Map conf, String componentName) {
- Integer ret = null;
- Map<String, String> map = (Map<String, String>)(conf.get(SPOUT_PARALLELISM));
- if(map != null) ret = JStormUtils.parseInt(map.get(componentName));
- return ret;
- }
-
- public static Integer getBoltParallelism(Map conf, String componentName) {
- Integer ret = null;
- Map<String, String> map = (Map<String, String>)(conf.get(BOLT_PARALLELISM));
- if(map != null) ret = JStormUtils.parseInt(map.get(componentName));
- return ret;
- }
-
- protected static String TOPOLOGY_BUFFER_SIZE_LIMITED = "topology.buffer.size.limited";
-
- public static void setTopologyBufferSizeLimited(Map conf, boolean limited) {
- conf.put(TOPOLOGY_BUFFER_SIZE_LIMITED, limited);
- }
-
- public static boolean getTopologyBufferSizeLimited(Map conf) {
- boolean isSynchronized = isNettySyncMode(conf);
- if (isSynchronized == true) {
- return true;
- }
-
- return JStormUtils.parseBoolean(conf.get(TOPOLOGY_BUFFER_SIZE_LIMITED), true);
-
- }
-
- protected static String SUPERVISOR_SLOTS_PORTS_BASE = "supervisor.slots.ports.base";
-
- public static int getSupervisorSlotsPortsBase(Map conf) {
- return JStormUtils.parseInt(conf.get(SUPERVISOR_SLOTS_PORTS_BASE), 6800);
- }
-
- // SUPERVISOR_SLOTS_PORTS_BASE don't provide setting function, it must be set by configuration
-
- protected static String SUPERVISOR_SLOTS_PORT_CPU_WEIGHT = "supervisor.slots.port.cpu.weight";
- public static double getSupervisorSlotsPortCpuWeight(Map conf) {
- Object value = conf.get(SUPERVISOR_SLOTS_PORT_CPU_WEIGHT);
- Double ret = JStormUtils.convertToDouble(value);
- if (ret == null) {
- return 1.0;
- }else {
- return ret;
- }
- }
- // SUPERVISOR_SLOTS_PORT_CPU_WEIGHT don't provide setting function, it must be set by configuration
-
- protected static String USER_DEFINED_LOG4J_CONF = "user.defined.log4j.conf";
-
- public static String getUserDefinedLog4jConf(Map conf) {
- return (String)conf.get(USER_DEFINED_LOG4J_CONF);
- }
-
- public static void setUserDefinedLog4jConf(Map conf, String fileName) {
- conf.put(USER_DEFINED_LOG4J_CONF, fileName);
- }
-
- protected static String USER_DEFINED_LOGBACK_CONF = "user.defined.logback.conf";
-
- public static String getUserDefinedLogbackConf(Map conf) {
- return (String)conf.get(USER_DEFINED_LOGBACK_CONF);
- }
-
- public static void setUserDefinedLogbackConf(Map conf, String fileName) {
- conf.put(USER_DEFINED_LOGBACK_CONF, fileName);
- }
-
- protected static String TASK_ERROR_INFO_REPORT_INTERVAL = "topology.task.error.report.interval";
-
- public static Integer getTaskErrorReportInterval(Map conf) {
- return JStormUtils.parseInt(conf.get(TASK_ERROR_INFO_REPORT_INTERVAL), 60);
- }
-
- public static void setTaskErrorReportInterval(Map conf, Integer interval) {
- conf.put(TASK_ERROR_INFO_REPORT_INTERVAL, interval);
- }
-}
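Most of these extension options are plain setters over the topology conf map. A hedged usage sketch; the class name and the chosen values are illustrative, not recommendations:

    import backtype.storm.Config;

    import com.alibaba.jstorm.client.ConfigExtension;

    public class ConfigSketch {
        public static Config build() {
            Config conf = new Config();
            ConfigExtension.setTopologyDebugRecvTuple(conf, false); // do not log received tuples
            ConfigExtension.setMemSizePerWorkerByGB(conf, 2);       // 2 GB memory per worker
            ConfigExtension.setCpuSlotNumPerWorker(conf, 2);        // 2 CPU slots per worker
            ConfigExtension.setTaskOnDifferentNode(conf, true);     // spread tasks across nodes
            ConfigExtension.setSpoutSingleThread(conf, true);       // run spouts single-threaded
            return conf;
        }
    }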
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/WorkerAssignment.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/WorkerAssignment.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/WorkerAssignment.java
deleted file mode 100644
index 9eac326..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/WorkerAssignment.java
+++ /dev/null
@@ -1,264 +0,0 @@
-package com.alibaba.jstorm.client;
-
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-import org.apache.log4j.Logger;
-import org.json.simple.JSONAware;
-
-import com.alibaba.jstorm.utils.JStormUtils;
-
-import backtype.storm.scheduler.WorkerSlot;
-import backtype.storm.utils.Utils;
-
-
-public class WorkerAssignment extends WorkerSlot implements Serializable,
- JSONAware {
- private static final Logger LOG = Logger.getLogger(WorkerAssignment.class);
-
-
- private static final long serialVersionUID = -3483047434535537861L;
-
- private Map<String, Integer> componentToNum = new HashMap<String, Integer>();
-
- private long mem;
-
- private int cpu;
-
- private String hostName;
-
- private String jvm;
-
- private static final String COMPONENTTONUM_TAG = "componentToNum";
- private static final String MEM_TAG = "mem";
- private static final String CPU_TAG = "cpu";
- private static final String HOSTNAME_TAG = "hostName";
- private static final String JVM_TAG = "jvm";
- private static final String NODEID_TAG = "nodeId";
- private static final String PORT_TAG = "port";
-
- public WorkerAssignment(String nodeId, Number port) {
- super(nodeId, port);
- // TODO Auto-generated constructor stub
- }
-
- public WorkerAssignment() {
-
- }
-
-	public void addComponent(String componentName, Integer num) {
-		componentToNum.put(componentName, num);
- }
-
- public Map<String, Integer> getComponentToNum() {
- return componentToNum;
- }
-
- public String getHostName() {
- return hostName;
- }
-
- public void setHostName(String hostName) {
- this.hostName = hostName;
- }
-
- public void setJvm(String jvm) {
- this.jvm = jvm;
- }
-
- public String getJvm() {
- return jvm;
- }
-
- public long getMem() {
- return mem;
- }
-
- public void setMem(long mem) {
- this.mem = mem;
- }
-
- public int getCpu() {
- return cpu;
- }
-
- public void setCpu(int cpu) {
- this.cpu = cpu;
- }
-
- @Override
- public String toJSONString() {
-// StringBuilder sb = new StringBuilder();
-
-// sb.append("[");
-// sb.append("\"" + this.getNodeId() + "\"");
-// sb.append(",");
-// sb.append("\"" + this.hostName + "\"");
-// sb.append(",");
-// sb.append("\"" + String.valueOf(this.getPort()) + "\"");
-// sb.append(",");
-// sb.append("\"" + this.jvm + "\"");
-// sb.append(",");
-// sb.append("\"" + String.valueOf(this.mem) + "\"");
-// sb.append(",");
-// sb.append("\"" + String.valueOf(this.cpu) + "\"");
-// sb.append(",");
-// sb.append("{");
-// for (Entry<String, Integer> entry : componentToNum.entrySet()) {
-// sb.append("\"" + entry.getKey() + "\":");
-// sb.append("\"" + String.valueOf(entry.getValue()) + "\"");
-// sb.append(",");
-// }
-// sb.append("}");
-// sb.append("]");
-
-
-
- Map<String, String> map = new HashMap<String, String>();
-
- map.put(COMPONENTTONUM_TAG, Utils.to_json(componentToNum));
- map.put(MEM_TAG, String.valueOf(mem));
- map.put(CPU_TAG, String.valueOf(cpu));
- map.put(HOSTNAME_TAG, hostName);
- map.put(JVM_TAG, jvm);
- map.put(NODEID_TAG, getNodeId());
- map.put(PORT_TAG, String.valueOf(getPort()));
-
-
- return Utils.to_json(map);
- }
-
- public static WorkerAssignment parseFromObj(Object obj) {
- if (obj == null) {
- return null;
- }
-
- if (obj instanceof Map == false) {
- return null;
- }
-
- try {
- Map<String, String> map = (Map<String, String>)obj;
-
- String supervisorId = map.get(NODEID_TAG);
- String hostname = map.get(HOSTNAME_TAG);
- Integer port = JStormUtils.parseInt(map.get(PORT_TAG));
- String jvm = map.get(JVM_TAG);
- Long mem = JStormUtils.parseLong(map.get(MEM_TAG));
- Integer cpu = JStormUtils.parseInt(map.get(CPU_TAG));
- Map<String, Object> componentToNum = (Map<String, Object>)Utils.from_json(map.get(COMPONENTTONUM_TAG));
-
- WorkerAssignment ret = new WorkerAssignment(supervisorId, port);
-
-
- ret.hostName = hostname;
- ret.setNodeId(supervisorId);
- ret.setJvm(jvm);
- if (port != null) {
- ret.setPort(port);
- }
- if (mem != null) {
- ret.setMem(mem);
- }
- if (cpu != null) {
- ret.setCpu(cpu);
- }
-
- for (Entry<String, Object> entry : componentToNum.entrySet()) {
- ret.addComponent(entry.getKey(),
- JStormUtils.parseInt(entry.getValue()));
- }
- return ret;
- } catch (Exception e) {
- LOG.error("Failed to convert to WorkerAssignment, raw:" + obj, e);
- return null;
- }
-
- }
-
- public static String getStringFromJson(String text) {
- return text.equals("null") ? null : text;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = super.hashCode();
- result = prime * result
- + ((componentToNum == null) ? 0 : componentToNum.hashCode());
- result = prime * result + cpu;
- result = prime * result
- + ((hostName == null) ? 0 : hostName.hashCode());
- result = prime * result + ((jvm == null) ? 0 : jvm.hashCode());
- result = prime * result + (int) (mem ^ (mem >>> 32));
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (!super.equals(obj))
- return false;
- if (getClass() != obj.getClass())
- return false;
- WorkerAssignment other = (WorkerAssignment) obj;
- if (componentToNum == null) {
- if (other.componentToNum != null)
- return false;
- } else if (!componentToNum.equals(other.componentToNum))
- return false;
- if (cpu != other.cpu)
- return false;
- if (hostName == null) {
- if (other.hostName != null)
- return false;
- } else if (!hostName.equals(other.hostName))
- return false;
- if (jvm == null) {
- if (other.jvm != null)
- return false;
- } else if (!jvm.equals(other.jvm))
- return false;
- if (mem != other.mem)
- return false;
- return true;
- }
-
- public static void main(String[] args) {
- WorkerAssignment input = new WorkerAssignment();
-
- input.setJvm("sb");
-
- input.setCpu(1);
-
- input.setMem(2);
-
- input.addComponent("2b", 2);
-
- String outString = Utils.to_json(input);
-
- System.out.println(input);
-
- //String outString = "[componentToNum={},mem=1610612736,cpu=1,hostName=mobilejstorm-60-1,jvm=<null>,nodeId=<null>,port=0]";
-
- Object object = Utils.from_json(outString);
- System.out.println(object);
-
- System.out.println(parseFromObj(object));
-
- System.out.print(input.equals(parseFromObj(object)));
- }
-
-
-}
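As a reference point, a hedged sketch of the JSON round trip implemented by toJSONString() and parseFromObj() above; it assumes the deleted class and backtype.storm.utils.Utils are still on the classpath, and the node, host, port, and component values are illustrative.

    // Hedged sketch of the serialize/parse round trip used for worker assignments.
    import backtype.storm.utils.Utils;
    import com.alibaba.jstorm.client.WorkerAssignment;

    public class WorkerAssignmentRoundTrip {
        public static void main(String[] args) {
            WorkerAssignment assignment = new WorkerAssignment("supervisor-1", 6800);
            assignment.setHostName("worker-host-1");
            assignment.setMem(2L * 1024 * 1024 * 1024);   // 2 GB
            assignment.setCpu(1);
            assignment.addComponent("word-spout", 2);     // two instances of "word-spout" in this worker

            // toJSONString() emits a flat map keyed by componentToNum, mem, cpu, hostName, jvm, nodeId, port.
            String json = assignment.toJSONString();

            // parseFromObj() accepts the Map produced by Utils.from_json and returns null on bad input.
            WorkerAssignment parsed = WorkerAssignment.parseFromObj(Utils.from_json(json));
            System.out.println(assignment.equals(parsed)); // expected: true
        }
    }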
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricCallback.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricCallback.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricCallback.java
deleted file mode 100644
index 964913e..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricCallback.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package com.alibaba.jstorm.client.metric;
-
-import com.codahale.metrics.Metric;
-
-public interface MetricCallback<T extends Metric> {
- void callback(T metric);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricClient.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricClient.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricClient.java
deleted file mode 100644
index becc365..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/metric/MetricClient.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package com.alibaba.jstorm.client.metric;
-
-import backtype.storm.task.TopologyContext;
-
-import com.alibaba.jstorm.metric.Metrics;
-import com.codahale.metrics.Counter;
-import com.codahale.metrics.Gauge;
-import com.codahale.metrics.Histogram;
-import com.codahale.metrics.Meter;
-import com.codahale.metrics.Timer;
-import com.alibaba.jstorm.metric.JStormTimer;
-import com.alibaba.jstorm.metric.JStormHistogram;
-
-public class MetricClient {
-
- private final int taskid;
-
- public MetricClient(TopologyContext context) {
- taskid = context.getThisTaskId();
- }
-
- private String getMetricName(Integer taskid, String name) {
- return "task-" + String.valueOf(taskid) + ":" + name;
- }
-
- public Gauge<?> registerGauge(String name, Gauge<?> gauge, MetricCallback<Gauge<?>> callback) {
- String userMetricName = getMetricName(taskid, name);
- Gauge<?> ret = Metrics.registerGauge(userMetricName, gauge);
- Metrics.registerUserDefine(userMetricName, gauge, callback);
- return ret;
- }
-
- public Counter registerCounter(String name, MetricCallback<Counter> callback) {
- String userMetricName = getMetricName(taskid, name);
- Counter ret = Metrics.registerCounter(userMetricName);
- Metrics.registerUserDefine(userMetricName, ret, callback);
- return ret;
- }
-
- public Meter registerMeter(String name, MetricCallback<Meter> callback) {
- String userMetricName = getMetricName(taskid, name);
- Meter ret = Metrics.registerMeter(userMetricName);
- Metrics.registerUserDefine(userMetricName, ret, callback);
- return ret;
- }
-
- public JStormTimer registerTimer(String name, MetricCallback<Timer> callback) {
- String userMetricName = getMetricName(taskid, name);
- JStormTimer ret = Metrics.registerTimer(userMetricName);
- Metrics.registerUserDefine(userMetricName, ret, callback);
- return ret;
- }
-
- public JStormHistogram registerHistogram(String name, MetricCallback<Histogram> callback) {
- String userMetricName = getMetricName(taskid, name);
- JStormHistogram ret = Metrics.registerHistograms(userMetricName);
- Metrics.registerUserDefine(userMetricName, ret, callback);
- return ret;
- }
-
- public boolean unregister(String name, Integer taskid) {
- String userMetricName = getMetricName(taskid, name);
- return Metrics.unregisterUserDefine(userMetricName);
- }
-
-}
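A hedged sketch of how a bolt could have used this client together with the MetricCallback interface above to register a user-defined counter. The bolt and metric names are illustrative, and when the callback fires depends on the metrics registry, which is not shown in this hunk.

    // Hedged sketch: registering a user-defined counter from a bolt via MetricClient.
    import java.util.Map;

    import backtype.storm.task.OutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichBolt;
    import backtype.storm.tuple.Tuple;

    import com.alibaba.jstorm.client.metric.MetricCallback;
    import com.alibaba.jstorm.client.metric.MetricClient;
    import com.codahale.metrics.Counter;

    public class CountingBolt extends BaseRichBolt {
        private OutputCollector collector;
        private Counter tupleCounter;

        @Override
        public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
            this.collector = collector;
            MetricClient metricClient = new MetricClient(context);
            // The callback is invoked by the metrics subsystem; here it just logs the count.
            tupleCounter = metricClient.registerCounter("tuple-count", new MetricCallback<Counter>() {
                @Override
                public void callback(Counter metric) {
                    System.out.println("tuple-count=" + metric.getCount());
                }
            });
        }

        @Override
        public void execute(Tuple input) {
            tupleCounter.inc();
            collector.ack(input);
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // no output streams in this sketch
        }
    }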
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IAckValueSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IAckValueSpout.java b/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IAckValueSpout.java
deleted file mode 100644
index f140098..0000000
--- a/jstorm-client-extension/src/main/java/com/alibaba/jstorm/client/spout/IAckValueSpout.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package com.alibaba.jstorm.client.spout;
-
-import java.util.List;
-
-/**
- * This interface exposes the emitted values when a tuple succeeds.
- *
- * If a spout implements this interface,
- * ISpout.ack() will not be called when a tuple succeeds.
- *
- * @author longda
- */
-public interface IAckValueSpout {
- void ack(Object msgId, List<Object> values);
-}
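A hedged sketch of a spout opting into value-carrying acks via this interface, per the Javadoc above: the emitted values are delivered back to ack() instead of the plain ISpout.ack(msgId) call. Class, field, and value names are illustrative.

    // Hedged sketch: a spout that receives its emitted values back on ack.
    import java.util.List;
    import java.util.Map;
    import java.util.UUID;

    import backtype.storm.spout.SpoutOutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichSpout;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;

    import com.alibaba.jstorm.client.spout.IAckValueSpout;

    public class ValueAckingSpout extends BaseRichSpout implements IAckValueSpout {
        private SpoutOutputCollector collector;

        @Override
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
            this.collector = collector;
        }

        @Override
        public void nextTuple() {
            // Anchor each tuple with a message id so the ack callback fires.
            collector.emit(new Values("hello"), UUID.randomUUID().toString());
        }

        // Called instead of ISpout.ack(msgId): the emitted values come back with the ack.
        @Override
        public void ack(Object msgId, List<Object> values) {
            System.out.println("acked " + msgId + " values=" + values);
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("word"));
        }
    }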
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/LICENSE
----------------------------------------------------------------------
diff --git a/LICENSE b/LICENSE
old mode 100644
new mode 100755
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/README.md
----------------------------------------------------------------------
diff --git a/README.md b/README.md
old mode 100644
new mode 100755
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/bin/check_jstorm_Supervisor.sh
----------------------------------------------------------------------
diff --git a/bin/check_jstorm_Supervisor.sh b/bin/check_jstorm_Supervisor.sh
new file mode 100644
index 0000000..1ec4394
--- /dev/null
+++ b/bin/check_jstorm_Supervisor.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+JAVA_HOME=/opt/taobao/java
+export PATH=$PATH:$JAVA_HOME/bin
+
+LOG=/home/admin/logs/check.log
+SP=`ps -ef |grep com.alibaba.jstorm.daemon.supervisor.Supervisor |grep -v grep |wc -l`
+if [ $SP -lt 1 ];then
+ mkdir -p /home/admin/logs
+ echo -e "`date` [ERROR] no process and restart Jstorm Suppervisor" >>$LOG
+ cd /home/admin/bin; nohup /home/admin/jstorm/bin/jstorm supervisor >/dev/null 2>&1 &
+else
+ echo -e "`date` [INFO:] return $SP Jstorm Supervisor ok " >>$LOG
+fi
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/bin/jstorm.py
----------------------------------------------------------------------
diff --git a/bin/jstorm.py b/bin/jstorm.py
new file mode 100755
index 0000000..c4b3fe7
--- /dev/null
+++ b/bin/jstorm.py
@@ -0,0 +1,459 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#!/usr/bin/python
+
+import os
+import sys
+import random
+import subprocess as sub
+import getopt
+
+def identity(x):
+ return x
+
+def cygpath(x):
+ command = ["cygpath", "-wp", x]
+ p = sub.Popen(command,stdout=sub.PIPE)
+ output, errors = p.communicate()
+ lines = output.split("\n")
+ return lines[0]
+
+if sys.platform == "cygwin":
+ normclasspath = cygpath
+else:
+ normclasspath = identity
+
+CLIENT_CONF_FILE = ""
+JSTORM_DIR = "/".join(os.path.realpath( __file__ ).split("/")[:-2])
+JSTORM_CONF_DIR = os.getenv("JSTORM_CONF_DIR", JSTORM_DIR + "/conf" )
+LOGBACK_CONF = JSTORM_CONF_DIR + "/jstorm.logback.xml"
+CONFIG_OPTS = []
+EXCLUDE_JARS = []
+INCLUDE_JARS = []
+STATUS = 0
+
+
+def check_java():
+ check_java_cmd = 'which java'
+ ret = os.system(check_java_cmd)
+ if ret != 0:
+ print("Failed to find java, please add java to PATH")
+ sys.exit(-1)
+
+def get_config_opts():
+ global CONFIG_OPTS
+ return "-Dstorm.options=" + (','.join(CONFIG_OPTS)).replace(' ', "%%%%")
+
+def get_client_childopts():
+ ret = (" -Dstorm.root.logger=INFO,stdout -Dlogback.configurationFile=" + JSTORM_DIR +
+ "/conf/client_logback.xml -Dlog4j.configuration=File:" + JSTORM_DIR +
+ "/conf/client_log4j.properties")
+ if CLIENT_CONF_FILE != "":
+ ret += (" -Dstorm.conf.file=" + CLIENT_CONF_FILE)
+ return ret
+
+def get_server_childopts(log_name):
+ ret = (" -Dlogfile.name=%s -Dlogback.configurationFile=%s" %(log_name, LOGBACK_CONF))
+ return ret
+
+if not os.path.exists(JSTORM_DIR + "/RELEASE"):
+ print "******************************************"
+ print "The jstorm client can only be run from within a release. You appear to be trying to run the client from a checkout of JStorm's source code."
+ print "\nYou can download a JStorm release "
+ print "******************************************"
+ sys.exit(1)
+
+def get_jars_full(adir):
+ files = os.listdir(adir)
+ ret = []
+ for f in files:
+ if f.endswith(".jar") == False:
+ continue
+ filter = False
+ for exclude_jar in EXCLUDE_JARS:
+ if f.find(exclude_jar) >= 0:
+ filter = True
+ break
+
+ if filter == True:
+ print "Don't add " + f + " to classpath"
+ else:
+ ret.append(adir + "/" + f)
+ return ret
+
+def get_classpath(extrajars):
+ ret = []
+ ret.extend(extrajars)
+ ret.extend(get_jars_full(JSTORM_DIR))
+ ret.extend(get_jars_full(JSTORM_DIR + "/lib"))
+ ret.extend(INCLUDE_JARS)
+
+ return normclasspath(":".join(ret))
+
+def confvalue(name, extrapaths):
+ command = [
+ "java", "-client", "-Xms256m", "-Xmx256m", get_config_opts(), "-cp", get_classpath(extrapaths), "backtype.storm.command.config_value", name
+ ]
+ p = sub.Popen(command, stdout=sub.PIPE)
+ output, errors = p.communicate()
+ lines = output.split("\n")
+ for line in lines:
+ tokens = line.split(" ")
+ if tokens[0] == "VALUE:":
+ return " ".join(tokens[1:])
+ print "Failed to get config " + name
+ print errors
+ print output
+
+def print_localconfvalue(name):
+ """Syntax: [jstorm localconfvalue conf-name]
+
+ Prints out the value for conf-name in the local JStorm configs.
+ The local JStorm configs are the ones in ~/.jstorm/storm.yaml merged
+ in with the configs in defaults.yaml.
+ """
+ print name + ": " + confvalue(name, [JSTORM_CONF_DIR])
+
+def print_remoteconfvalue(name):
+ """Syntax: [jstorm remoteconfvalue conf-name]
+
+ Prints out the value for conf-name in the cluster's JStorm configs.
+ The cluster's JStorm configs are the ones in $STORM-PATH/conf/storm.yaml
+ merged in with the configs in defaults.yaml.
+
+ This command must be run on a cluster machine.
+ """
+ print name + ": " + confvalue(name, [JSTORM_CONF_DIR])
+
+def exec_storm_class(klass, jvmtype="-server", childopts="", extrajars=[], args=[]):
+ nativepath = confvalue("java.library.path", extrajars)
+ args_str = " ".join(map(lambda s: "\"" + s + "\"", args))
+ command = "java " + jvmtype + " -Djstorm.home=" + JSTORM_DIR + " " + get_config_opts() + " -Djava.library.path=" + nativepath + " " + childopts + " -cp " + get_classpath(extrajars) + " " + klass + " " + args_str
+ print "Running: " + command
+ global STATUS
+ STATUS = os.system(command)
+
+def jar(jarfile, klass, *args):
+ """Syntax: [jstorm jar topology-jar-path class ...]
+
+ Runs the main method of class with the specified arguments.
+ The jstorm jars and configs in $JSTORM_CONF_DIR/storm.yaml are put on the classpath.
+ The process is configured so that StormSubmitter
+ (https://github.com/alibaba/jstorm/wiki/JStorm-Chinese-Documentation)
+ will upload the jar at topology-jar-path when the topology is submitted.
+ """
+ childopts = "-Dstorm.jar=" + jarfile + get_client_childopts()
+ exec_storm_class(
+ klass,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[jarfile, JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ args=args,
+ childopts=childopts)
+
+def zktool(*args):
+ """Syntax: [jstorm jar topology-jar-path class ...]
+
+ Runs the main method of class with the specified arguments.
+ The jstorm jars and configs in ~/.jstorm are put on the classpath.
+ The process is configured so that StormSubmitter
+ (https://github.com/alibaba/jstorm/wiki/JStorm-Chinese-Documentation)
+ will upload the jar at topology-jar-path when the topology is submitted.
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "com.alibaba.jstorm.zk.ZkTool",
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[ JSTORM_CONF_DIR, CLIENT_CONF_FILE],
+ args=args,
+ childopts=childopts)
+
+def kill(*args):
+ """Syntax: [jstorm kill topology-name [wait-time-secs]]
+
+ Kills the topology with the name topology-name. JStorm will
+ first deactivate the topology's spouts for the duration of
+ the topology's message timeout to allow all messages currently
+ being processed to finish processing. JStorm will then shutdown
+ the workers and clean up their state. You can override the length
+ of time JStorm waits between deactivation and shutdown.
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.kill_topology",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def activate(*args):
+ """Syntax: [jstorm activate topology-name]
+
+ Activates the specified topology's spouts.
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.activate",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def deactivate(*args):
+ """Syntax: [jstorm deactivate topology-name]
+
+ Deactivates the specified topology's spouts.
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.deactivate",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def rebalance(*args):
+ """Syntax: [jstorm rebalance topology-name [-w wait-time-secs]]
+
+ Sometimes you may wish to spread out where the workers for a topology
+ are running. For example, let's say you have a 10 node cluster running
+ 4 workers per node, and then let's say you add another 10 nodes to
+ the cluster. You may wish to have JStorm spread out the workers for the
+ running topology so that each node runs 2 workers. One way to do this
+ is to kill the topology and resubmit it, but JStorm provides a "rebalance"
+ command that provides an easier way to do this.
+
+ Rebalance will first deactivate the topology for the duration of the
+ message timeout and then redistribute
+ the workers evenly around the cluster. The topology will then return to
+ its previous state of activation (so a deactivated topology will still
+ be deactivated and an activated topology will go back to being activated).
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.rebalance",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def restart(*args):
+ """Syntax: [jstorm restart topology-name [conf]]
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.restart",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def update_config(*args):
+ """Syntax: [jstorm restart topology-name [conf]]
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.update_config",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def nimbus():
+ """Syntax: [jstorm nimbus]
+
+ Launches the nimbus daemon. This command should be run under
+ supervision with a tool like daemontools or monit.
+
+ See Setting up a JStorm cluster for more information.
+ (https://github.com/alibaba/jstorm/wiki/JStorm-Chinese-Documentation)
+ """
+ cppaths = [JSTORM_CONF_DIR]
+ nimbus_classpath = confvalue("nimbus.classpath", cppaths)
+ childopts = confvalue("nimbus.childopts", cppaths) + get_server_childopts("nimbus.log")
+ exec_storm_class(
+ "com.alibaba.jstorm.daemon.nimbus.NimbusServer",
+ jvmtype="-server",
+ extrajars=(cppaths+[nimbus_classpath]),
+ childopts=childopts)
+
+def supervisor():
+ """Syntax: [jstorm supervisor]
+
+ Launches the supervisor daemon. This command should be run
+ under supervision with a tool like daemontools or monit.
+
+ See Setting up a JStorm cluster for more information.
+ (https://github.com/alibaba/jstorm/wiki/JStorm-Chinese-Documentation)
+ """
+ cppaths = [JSTORM_CONF_DIR]
+ childopts = confvalue("supervisor.childopts", cppaths) + get_server_childopts("supervisor.log")
+ exec_storm_class(
+ "com.alibaba.jstorm.daemon.supervisor.Supervisor",
+ jvmtype="-server",
+ extrajars=cppaths,
+ childopts=childopts)
+
+
+def drpc():
+ """Syntax: [jstorm drpc]
+
+ Launches a DRPC daemon. This command should be run under supervision
+ with a tool like daemontools or monit.
+
+ See Distributed RPC for more information.
+ (https://github.com/alibaba/jstorm/wiki/JStorm-Chinese-Documentation)
+ """
+ cppaths = [JSTORM_CONF_DIR]
+ childopts = confvalue("drpc.childopts", cppaths) + get_server_childopts("drpc.log")
+ exec_storm_class(
+ "com.alibaba.jstorm.drpc.Drpc",
+ jvmtype="-server",
+ extrajars=cppaths,
+ childopts=childopts)
+
+def print_classpath():
+ """Syntax: [jstorm classpath]
+
+ Prints the classpath used by the jstorm client when running commands.
+ """
+ print get_classpath([])
+
+def print_commands():
+ """Print all client commands and link to documentation"""
+ print "jstorm command [--config client_storm.yaml] [--exclude-jars exclude1.jar,exclude2.jar] [-c key1=value1,key2=value2][command parameter]"
+ print "Commands:\n\t", "\n\t".join(sorted(COMMANDS.keys()))
+ print "\n\t[--config client_storm.yaml]\t\t\t optional, setting client's storm.yaml"
+ print "\n\t[--exclude-jars exclude1.jar,exclude2.jar]\t optional, exclude jars, avoid jar conflict"
+ print "\n\t[-c key1=value1,key2=value2]\t\t\t optional, add key=value pair to configuration"
+ print "\nHelp:", "\n\thelp", "\n\thelp <command>"
+ print "\nDocumentation for the jstorm client can be found at https://github.com/alibaba/jstorm/wiki/JStorm-Chinese-Documentation\n"
+
+def print_usage(command=None):
+ """Print one help message or list of available commands"""
+ if command != None:
+ if COMMANDS.has_key(command):
+ print (COMMANDS[command].__doc__ or
+ "No documentation provided for <%s>" % command)
+ else:
+ print "<%s> is not a valid command" % command
+ else:
+ print_commands()
+
+def unknown_command(*args):
+ print "Unknown command: [jstorm %s]" % ' '.join(sys.argv[1:])
+ print_usage()
+
+def metrics_Monitor(*args):
+ """Syntax: [jstorm metricsMonitor topologyname bool]
+ Enable or disable the metrics monitor of one topology.
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.metrics_monitor",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+def list(*args):
+ """Syntax: [jstorm list]
+
+ List cluster information
+ """
+ childopts = get_client_childopts()
+ exec_storm_class(
+ "backtype.storm.command.list",
+ args=args,
+ jvmtype="-client -Xms256m -Xmx256m",
+ extrajars=[JSTORM_CONF_DIR, JSTORM_DIR + "/bin", CLIENT_CONF_FILE],
+ childopts=childopts)
+
+COMMANDS = {"jar": jar, "kill": kill, "nimbus": nimbus, "zktool": zktool,
+ "drpc": drpc, "supervisor": supervisor, "localconfvalue": print_localconfvalue,
+ "remoteconfvalue": print_remoteconfvalue, "classpath": print_classpath,
+ "activate": activate, "deactivate": deactivate, "rebalance": rebalance, "help": print_usage,
+ "metricsMonitor": metrics_Monitor, "list": list, "restart": restart, "update_config": update_config}
+
+def parse_config(config_list):
+ global CONFIG_OPTS
+ if len(config_list) > 0:
+ for config in config_list:
+ CONFIG_OPTS.append(config)
+
+def parse_exclude_jars(jars):
+ global EXCLUDE_JARS
+ EXCLUDE_JARS = jars.split(",")
+ print " Excludes jars:"
+ print EXCLUDE_JARS
+
+def parse_include_jars(jars):
+ global INCLUDE_JARS
+ INCLUDE_JARS = jars.split(",")
+ print " Include jars:"
+ print INCLUDE_JARS
+
+def parse_config_opts(args):
+ curr = args[:]
+ curr.reverse()
+ config_list = []
+ args_list = []
+
+ while len(curr) > 0:
+ token = curr.pop()
+ if token == "-c":
+ config_list.append(curr.pop())
+ elif token == "--config":
+ global CLIENT_CONF_FILE
+ CLIENT_CONF_FILE = curr.pop()
+ elif token == "--exclude-jars":
+ parse_exclude_jars(curr.pop())
+ elif token == "--include-jars":
+ parse_include_jars(curr.pop())
+ else:
+ args_list.append(token)
+
+ return config_list, args_list
+
+def main():
+ if len(sys.argv) <= 1:
+ print_usage()
+ sys.exit(-1)
+ global CONFIG_OPTS
+ config_list, args = parse_config_opts(sys.argv[1:])
+ parse_config(config_list)
+ COMMAND = args[0]
+ ARGS = args[1:]
+ if COMMANDS.get(COMMAND) == None:
+ unknown_command(COMMAND)
+ sys.exit(-1)
+ if len(ARGS) != 0 and ARGS[0] == "help":
+ print_usage(COMMAND)
+ sys.exit(0)
+ try:
+ (COMMANDS.get(COMMAND, "help"))(*ARGS)
+ except Exception, msg:
+ print(msg)
+ print_usage(COMMAND)
+ sys.exit(-1)
+ sys.exit(STATUS)
+
+if __name__ == "__main__":
+ check_java()
+ main()
+
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/bin/start.sh
----------------------------------------------------------------------
diff --git a/bin/start.sh b/bin/start.sh
new file mode 100644
index 0000000..01f81e0
--- /dev/null
+++ b/bin/start.sh
@@ -0,0 +1,78 @@
+#!/bin/sh
+
+if [ -e ~/.bashrc ]
+then
+ source ~/.bashrc
+fi
+
+if [ -e ~/.bash_profile ]
+then
+ source ~/.bash_profile
+fi
+
+if [ "x$JAVA_HOME" != "x" ]
+then
+ echo "JAVA_HOME has been set "
+else
+ export JAVA_HOME=/opt/taobao/java
+fi
+echo "JAVA_HOME =" $JAVA_HOME
+
+if [ "x$JSTORM_HOME" != "x" ]
+then
+ echo "JSTORM_HOME has been set "
+else
+ export JSTORM_HOME=/home/admin/jstorm
+fi
+echo "JSTORM_HOME =" $JSTORM_HOME
+
+if [ "x$JSTORM_CONF_DIR_PATH" != "x" ]
+then
+ echo "JSTORM_CONF_DIR_PATH has been set "
+else
+ export JSTORM_CONF_DIR_PATH=$JSTORM_HOME/conf
+fi
+echo "JSTORM_CONF_DIR_PATH =" $JSTORM_CONF_DIR_PATH
+
+
+
+export PATH=$JAVA_HOME/bin:$JSTORM_HOME/bin:$PATH
+
+
+which java
+
+if [ $? -eq 0 ]
+then
+ echo "Find java"
+else
+ echo "No java, please install java firstly !!!"
+ exit 1
+fi
+
+function startJStorm()
+{
+ PROCESS=$1
+ echo "start $PROCESS"
+ cd $JSTORM_HOME/bin; nohup $JSTORM_HOME/bin/jstorm $PROCESS >/dev/null 2>&1 &
+ sleep 4
+ rm -rf nohup
+ ps -ef|grep $2
+}
+
+
+
+HOSTNAME=`hostname -i`
+NIMBUS_HOST=`grep "nimbus.host:" $JSTORM_CONF_DIR_PATH/storm.yaml | grep -w $HOSTNAME`
+SUPERVISOR_HOST_START=`grep "supervisor.host.start:" $JSTORM_CONF_DIR_PATH/storm.yaml | grep -w "false"`
+
+if [ "X${NIMBUS_HOST}" != "X" ]
+then
+ startJStorm "nimbus" "NimbusServer"
+fi
+
+if [ "X${SUPERVISOR_HOST_START}" == "X" ]
+then
+ startJStorm "supervisor" "Supervisor"
+fi
+
+echo "Successfully start jstorm daemon...."
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/bin/stop.sh
----------------------------------------------------------------------
diff --git a/bin/stop.sh b/bin/stop.sh
new file mode 100755
index 0000000..aa7935a
--- /dev/null
+++ b/bin/stop.sh
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+
+function killJStorm()
+{
+ ps -ef|grep $1|grep -v grep |awk '{print $2}' |xargs kill
+ sleep 1
+ ps -ef|grep $1
+
+ echo "kill "$1
+}
+
+killJStorm "Supervisor"
+killJStorm "NimbusServer"
+echo "Successfully stop jstorm"
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/conf/cgconfig.conf
----------------------------------------------------------------------
diff --git a/conf/cgconfig.conf b/conf/cgconfig.conf
new file mode 100755
index 0000000..c21cd13
--- /dev/null
+++ b/conf/cgconfig.conf
@@ -0,0 +1,18 @@
+mount {
+ cpu = /cgroup/cpu;
+}
+
+group jstorm {
+ perm {
+ task {
+ uid = 500;
+ gid = 500;
+ }
+ admin {
+ uid = 500;
+ gid = 500;
+ }
+ }
+ cpu {
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/conf/client_log4j.properties
----------------------------------------------------------------------
diff --git a/conf/client_log4j.properties b/conf/client_log4j.properties
new file mode 100755
index 0000000..e80ce0b
--- /dev/null
+++ b/conf/client_log4j.properties
@@ -0,0 +1,19 @@
+#This file should be deleted when deployed to server (workaround to leiningen classpath putting dev resources on path)
+#This file is needed for tests
+
+
+storm.root.logger=INFO, D
+
+log4j.rootLogger=${storm.root.logger}
+#log4j.rootLogger=INFO,stdout, D
+
+
+
+### output to console ###
+log4j.appender.stdout = org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target = System.out
+log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern = [%p %d{yyyy-MM-dd HH:mm:ss} %c{1}:%L %t] %m%n
+
+log4j.category.org.apache.zookeeper=warn
+log4j.category.com.netflix.curator=warn
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/conf/client_logback.xml
----------------------------------------------------------------------
diff --git a/conf/client_logback.xml b/conf/client_logback.xml
new file mode 100755
index 0000000..02ef72e
--- /dev/null
+++ b/conf/client_logback.xml
@@ -0,0 +1,18 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<configuration scan="true" scanPeriod="30 seconds">
+ <appender name="A1" class="ch.qos.logback.core.ConsoleAppender">
+ <encoder>
+ <pattern>%-4r [%t] %-5p %c - %m%n</pattern>
+ </encoder>
+ </appender>
+ <logger name="org.apache.zookeeper" level="ERROR" />
+ <logger name="com.netflix.curator" level="ERROR" />
+ <logger name="com.alibaba.jstorm.common.metric" level="ERROR" />
+ <logger name="com.alibaba.jstorm.daemon.nimbus.TopologyMetricsRunnable" level="ERROR" />
+ <logger name="com.alibaba.jstorm.metric" level="ERROR" />
+
+ <root level="INFO">
+ <appender-ref ref="A1" />
+ </root>
+</configuration>
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/conf/jstorm.log4j.properties
----------------------------------------------------------------------
diff --git a/conf/jstorm.log4j.properties b/conf/jstorm.log4j.properties
new file mode 100755
index 0000000..ef31aed
--- /dev/null
+++ b/conf/jstorm.log4j.properties
@@ -0,0 +1,50 @@
+#This file should be deleted when deployed to server (workaround to leiningen classpath putting dev resources on path)
+#This file is needed for tests
+
+storm.root.logger=INFO, D
+
+log4j.rootLogger=${storm.root.logger}
+#log4j.rootLogger=INFO,stdout, D, jmonitor
+
+### output to console ###
+### In order to avoid dead lock, redirect supervisor out/err to /dev/null
+### Stdout logger can't be used until manually start worker
+log4j.appender.stdout = org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.Target = System.out
+log4j.appender.stdout.layout = org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern = [%p %d{yyyy-MM-dd HH:mm:ss} %c{1}:%L %t] %m%n
+
+### output to file ###
+log4j.appender.D = org.apache.log4j.RollingFileAppender
+log4j.appender.D.File = ${jstorm.home}/logs/${logfile.name}
+log4j.appender.D.Append = true
+log4j.appender.D.Threshold = INFO
+log4j.appender.D.MaxFileSize=1GB
+log4j.appender.D.MaxBackupIndex=5
+log4j.appender.D.layout = org.apache.log4j.PatternLayout
+log4j.appender.D.layout.ConversionPattern = [%p %d{yyyy-MM-dd HH:mm:ss} %c{1}:%L %t] %m%n
+
+log4j.logger.com.alibaba.jstorm=INFO
+
+### jstorm metrics ###
+log4j.logger.com.alibaba.jstorm.daemon.worker.metrics= INFO, M
+log4j.additivity.com.alibaba.jstorm.daemon.worker.metrics=false
+log4j.logger.com.alibaba.jstorm.task.heartbeat= INFO, M
+log4j.additivity.com.alibaba.jstorm.task.heartbeat=false
+log4j.logger.com.alibaba.jstorm.daemon.worker.hearbeat= INFO, M
+log4j.additivity.com.alibaba.jstorm.daemon.worker.hearbeat=false
+log4j.logger.com.alibaba.jstorm.metric= INFO, M
+log4j.additivity.com.alibaba.jstorm.metric=false
+
+log4j.appender.M = org.apache.log4j.RollingFileAppender
+log4j.appender.M.File = ${jstorm.home}/logs/${logfile.name}.metrics
+log4j.appender.M.Append = true
+log4j.appender.M.Threshold = INFO
+log4j.appender.M.MaxFileSize=100MB
+log4j.appender.M.MaxBackupIndex=5
+log4j.appender.M.layout = org.apache.log4j.PatternLayout
+log4j.appender.M.layout.ConversionPattern = [%p %d{yyyy-MM-dd HH:mm:ss} %c{1}:%L %t] %m%n
+
+
+##################jmonitor appender ##########################
+#log4j.appender.jmonitor=com.alibaba.alimonitor.jmonitor.plugin.log4j.JMonitorLog4jAppender
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/conf/jstorm.logback.xml
----------------------------------------------------------------------
diff --git a/conf/jstorm.logback.xml b/conf/jstorm.logback.xml
new file mode 100755
index 0000000..bd097ea
--- /dev/null
+++ b/conf/jstorm.logback.xml
@@ -0,0 +1,84 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<configuration scan="true" scanPeriod="60 seconds">
+ <appender name="A1"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${jstorm.home}/logs/${logfile.name}</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+ <fileNamePattern>${jstorm.home}/logs/${logfile.name}.%i</fileNamePattern>
+ <minIndex>1</minIndex>
+ <maxIndex>5</maxIndex>
+ </rollingPolicy>
+
+ <triggeringPolicy
+ class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+ <maxFileSize>1GB</maxFileSize>
+ </triggeringPolicy>
+
+ <encoder>
+ <pattern>[%p %d{yyyy-MM-dd HH:mm:ss} %c{1}:%L %t] %m%n</pattern>
+
+ </encoder>
+ </appender>
+
+ <appender name="METRICS"
+ class="ch.qos.logback.core.rolling.RollingFileAppender">
+ <file>${jstorm.home}/logs/${logfile.name}.metrics</file>
+ <rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
+ <fileNamePattern>${jstorm.home}/logs/${logfile.name}.metrics.%i</fileNamePattern>
+ <minIndex>1</minIndex>
+ <maxIndex>5</maxIndex>
+ </rollingPolicy>
+
+ <triggeringPolicy
+ class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
+ <maxFileSize>100MB</maxFileSize>
+ </triggeringPolicy>
+
+ <encoder>
+ <pattern>[%p %d{yyyy-MM-dd HH:mm:ss} %c{1}:%L %t] %m%n</pattern>
+ </encoder>
+ </appender>
+
+ <root level="INFO">
+ <appender-ref ref="A1" />
+ </root>
+
+ <logger name="com.alibaba.jstorm"
+ additivity="false">
+ <level value="INFO" />
+ <appender-ref ref="A1" />
+ </logger>
+
+ <logger name="com.alibaba.jstorm.common.metric"
+ additivity="false">
+ <level value="INFO" />
+ <appender-ref ref="METRICS" />
+ </logger>
+
+ <logger name="com.alibaba.jstorm.task.heartbeat"
+ additivity="false">
+ <level value="INFO" />
+ <appender-ref ref="METRICS" />
+ </logger>
+
+ <logger name="com.alibaba.jstorm.daemon.worker.hearbeat"
+ additivity="false">
+ <level value="INFO" />
+ <appender-ref ref="METRICS" />
+ </logger>
+
+ <logger name="com.alibaba.jstorm.daemon.nimbus.TopologyMetricsRunnable"
+ additivity="false">
+ <level value="INFO" />
+ <appender-ref ref="METRICS" />
+ </logger>
+
+ <logger name="com.alibaba.jstorm.metric"
+ additivity="false">
+ <level value="INFO" />
+ <appender-ref ref="METRICS" />
+ </logger>
+
+</configuration>
+
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/conf/storm.yaml
----------------------------------------------------------------------
diff --git a/conf/storm.yaml b/conf/storm.yaml
new file mode 100755
index 0000000..99bc984
--- /dev/null
+++ b/conf/storm.yaml
@@ -0,0 +1,83 @@
+########### These MUST be filled in for a storm configuration
+ storm.zookeeper.servers:
+ - "localhost"
+
+ storm.zookeeper.root: "/jstorm"
+
+ #nimbus.host is being used by $JSTORM_HOME/bin/start.sh
+ #it only supports IP addresses; please don't set a hostname
+ # For example
+ # nimbus.host: "10.132.168.10, 10.132.168.45"
+ #nimbus.host: "localhost"
+
+# %JSTORM_HOME% is the jstorm home directory
+ storm.local.dir: "%JSTORM_HOME%/data"
+
+ java.library.path: "/usr/local/lib:/opt/local/lib:/usr/lib"
+
+
+
+# if supervisor.slots.ports is null,
+# the port list will be generated by cpu cores and system memory size
+# for example, if there are 24 cpu cores and supervisor.slots.port.cpu.weight is 1.2
+# then there are 24/1.2 ports for cpu,
+# there are system_physical_memory_size/worker.memory.size ports for memory
+# The final number of ports is min(cpu_ports, memory_ports)
+ supervisor.slots.ports.base: 6800
+ supervisor.slots.port.cpu.weight: 1
+ supervisor.slots.ports: null
+#supervisor.slots.ports:
+# - 6800
+# - 6801
+# - 6802
+# - 6803
+
+# The user-defined classloader is disabled by default.
+# If there are jar conflicts between jstorm and the application,
+# please enable it
+ topology.enable.classloader: false
+
+# Enable the supervisor to use cgroups for resource isolation
+# Before enable it, you should make sure:
+# 1. Linux version (>= 2.6.18)
+# 2. Have installed cgroup (check the file's existence:/proc/cgroups)
+# 3. You should start your supervisor as root
+# You can get more about cgroup:
+# http://t.cn/8s7nexU
+ supervisor.enable.cgroup: false
+
+
+### Netty will send multiple messages in one batch
+### Setting this to true improves throughput at the cost of higher latency
+ storm.messaging.netty.transfer.async.batch: true
+
+### If this setting is true, a bounded disruptor queue is used as the internal queue;
+### otherwise an unbounded LinkedBlockingDeque is used.
+### Generally the topology is more stable when this setting is true,
+### but when there is a data loop, for example A -> B -> C -> A,
+### and the flow can block, please set this to false
+ topology.buffer.size.limited: true
+
+### Default worker memory size, in bytes
+ worker.memory.size: 2147483648
+
+# Metrics Monitor
+# topology.performance.metrics: switch for performance metrics.
+# When it is disabled, timer and histogram metrics are not collected.
+# topology.alimonitor.metrics.post: if disabled, metrics data is only
+# printed to the log; if enabled, it is also posted to alimonitor.
+ topology.performance.metrics: true
+ topology.alimonitor.metrics.post: false
+
+# UI MultiCluster
+# Following is an example of multicluster UI configuration
+# ui.clusters:
+# - {
+# name: "jstorm",
+# zkRoot: "/jstorm",
+# zkServers:
+# [ "localhost"],
+# zkPort: 2181,
+# }
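To make the slot-port rule in the comments above concrete, a hedged worked example follows. The exact rounding used by JStorm's supervisor is not specified in this file, so the integer division below is an assumption, and the hardware numbers are illustrative.

    // Hedged illustration of the documented rule: when supervisor.slots.ports is null,
    // the port count is min(cpu_ports, memory_ports) starting at supervisor.slots.ports.base.
    import java.util.ArrayList;
    import java.util.List;

    public class SlotPortsSketch {
        public static void main(String[] args) {
            int cpuCores = 24;
            double cpuWeight = 1.2;                          // supervisor.slots.port.cpu.weight
            long physicalMemory = 64L * 1024 * 1024 * 1024;  // 64 GB, illustrative
            long workerMemory = 2147483648L;                 // worker.memory.size (2 GB default above)
            int portsBase = 6800;                            // supervisor.slots.ports.base

            int cpuPorts = (int) (cpuCores / cpuWeight);             // 24 / 1.2 = 20
            int memoryPorts = (int) (physicalMemory / workerMemory); // 64 GB / 2 GB = 32
            int slots = Math.min(cpuPorts, memoryPorts);             // min(20, 32) = 20

            List<Integer> ports = new ArrayList<Integer>();
            for (int i = 0; i < slots; i++) {
                ports.add(portsBase + i);                    // 6800, 6801, ..., 6819
            }
            System.out.println(ports);
        }
    }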
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/dev-tools/add_apache_license.sh
----------------------------------------------------------------------
diff --git a/dev-tools/add_apache_license.sh b/dev-tools/add_apache_license.sh
new file mode 100755
index 0000000..26a0054
--- /dev/null
+++ b/dev-tools/add_apache_license.sh
@@ -0,0 +1 @@
+find . -name \*.java -exec sh -c "if ! grep -q 'LICENSE-2.0' '{}';then mv '{}' tmp && cp LICENSEHEADER.txt '{}' && cat tmp >> '{}' && rm tmp;fi" \;
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/dev-tools/java_license_header.txt
----------------------------------------------------------------------
diff --git a/dev-tools/java_license_header.txt b/dev-tools/java_license_header.txt
new file mode 100755
index 0000000..7e66353
--- /dev/null
+++ b/dev-tools/java_license_header.txt
@@ -0,0 +1,17 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/dev-tools/py_license_header.txt
----------------------------------------------------------------------
diff --git a/dev-tools/py_license_header.txt b/dev-tools/py_license_header.txt
new file mode 100755
index 0000000..0896fcd
--- /dev/null
+++ b/dev-tools/py_license_header.txt
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/dev-tools/storm-eclipse-java-formatter.xml
----------------------------------------------------------------------
diff --git a/dev-tools/storm-eclipse-java-formatter.xml b/dev-tools/storm-eclipse-java-formatter.xml
new file mode 100755
index 0000000..25e6f92
--- /dev/null
+++ b/dev-tools/storm-eclipse-java-formatter.xml
@@ -0,0 +1,291 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<profiles version="12">
+<profile kind="CodeFormatterProfile" name="Apache Storm Java Formatter" version="12">
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_before_root_tags" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.disabling_tag" value="@formatter:off"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_annotation" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_anonymous_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_case" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_block_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_annotation_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_closing_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_field" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.use_on_off_tags" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_else_in_if_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_else_statement_on_same_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_ellipsis" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.insert_new_line_for_parameter" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_breaks_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_multiple_fields" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_expressions_in_array_initializer" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_conditional_expression" value="80"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_array_initializer" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_finally_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_local_variable" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_catch_in_try_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_while" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_package" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_superinterfaces" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_new_chunk" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_binary_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_package" value="0"/>
+<setting id="org.eclipse.jdt.core.compiler.source" value="1.7"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_constant_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_line_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_enum_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.join_wrapped_lines" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_explicit_constructor_call" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_invocation_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_member_type" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.align_type_members_on_columns" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_method_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_selector_in_method_invocation" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_case" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_parameter_description" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_switch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_block_comment" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.lineSplit" value="160"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_if" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_brackets_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_explicitconstructorcall_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_constructor_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_first_class_body_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_method" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indentation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.enabling_tag" value="@formatter:on"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superclass_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_assignment" value="16"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.assertIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.char" value="space"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_try_resources" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_prefix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_body" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_method" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_outer_expressions_when_nested" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.format_guardian_clause_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_labeled_statement" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_annotation_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_method_body" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_method_declaration" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_try" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_at_in_annotation_type_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_switch" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_return" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_question_in_wildcard" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_try" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.preserve_white_space_between_code_and_line_comments" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_parenthesized_expression_in_throw" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.problem.enumIdentifier" value="error"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_switch" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_ellipsis" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_inits" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_method_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.compact_else_if" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_before_or_operator_multicatch" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_increments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.format_line_comment_starting_on_first_column" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_field" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_constant" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.indent_root_tags" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_union_type_in_multicatch" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_explicitconstructorcall_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_method_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_superinterfaces" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.tabulation.size" value="4"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_opening_brace_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_brace_in_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_constant" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_throws" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_if" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.clear_blank_lines_in_javadoc_comment" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_constructor_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_assignment_operator" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_empty_lines" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_paren_in_cast" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_declaration_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_block_in_case" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_empty_lines_to_preserve" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_bracket_in_array_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_qualified_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.compiler.compliance" value="1.7"/>
+<setting id="org.eclipse.jdt.core.formatter.continuation_indentation_for_array_initializer" value="2"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_brackets_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_at_in_annotation_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_allocation_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_cast" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_unary_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_empty_array_initializer_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_enum_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_imple_if_on_one_line" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_constructor_declaration_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_closing_angle_bracket_in_type_parameters" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_at_end_of_file_if_missing" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_labeled_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_type_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_binary_expression" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_enum_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_type" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_while" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode" value="enabled"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_try" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.put_empty_statement_on_new_line" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_label" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_parameter" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_before_while_in_do_statement" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_enum_constant" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_javadoc_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.line_length" value="160"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_after_annotation_on_package" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_import_groups" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_enum_constant_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_constructor_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.number_of_blank_lines_at_beginning_of_method_body" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_type_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_annotation_type_member_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.wrap_before_binary_operator" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_between_type_declarations" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_synchronized" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_statements_compare_to_block" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_superinterfaces_in_enum_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.join_lines_in_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_question_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_field_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_compact_if" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_for_inits" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_switchstatements_compare_to_cases" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_default" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_and_in_type_parameter" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_parens_in_constructor_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_before_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_html" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_throws_clause_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_angle_bracket_in_type_parameters" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_bracket_in_array_allocation_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_anonymous_type_declaration" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_colon_in_conditional" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_postfix_operator" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_source_code" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_synchronized" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_allocation_expression" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_constructor_declaration_throws" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_parameters_in_method_declaration" value="16"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.compiler.codegen.targetPlatform" value="1.7"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_resources_in_try" value="160"/>
+<setting id="org.eclipse.jdt.core.formatter.use_tabs_only_for_leading_indentations" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_arguments_in_annotation" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_header" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.format_block_comments" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_enum_constant" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.alignment_for_enum_constants" value="0"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_new_line_in_empty_block" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_annotation_declaration_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_parenthesized_expression" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_catch" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_multiple_local_declarations" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_switch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_comma_in_for_increments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_invocation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_colon_in_assert" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.brace_position_for_type_declaration" value="end_of_line"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_brace_in_array_initializer" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_between_empty_braces_in_array_initializer" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_opening_paren_in_method_declaration" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_for" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_paren_in_catch" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_parameterized_type_reference" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_field_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_closing_paren_in_annotation" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_parameterized_type_reference" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_method_invocation_arguments" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.comment.new_lines_at_javadoc_boundaries" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.blank_lines_after_imports" value="1"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_comma_in_multiple_local_declarations" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.indent_body_declarations_compare_to_enum_constant_header" value="true"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_after_semicolon_in_for" value="insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_line_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_semicolon_in_try_resources" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.insert_space_before_opening_angle_bracket_in_type_arguments" value="do not insert"/>
+<setting id="org.eclipse.jdt.core.formatter.never_indent_block_comments_on_first_column" value="false"/>
+<setting id="org.eclipse.jdt.core.formatter.keep_then_statement_on_same_line" value="false"/>
+</profile>
+</profiles>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/docs/log.test.xlsx
----------------------------------------------------------------------
diff --git a/docs/log.test.xlsx b/docs/log.test.xlsx
new file mode 100755
index 0000000..b178ba7
Binary files /dev/null and b/docs/log.test.xlsx differ
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/.classpath
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.classpath b/example/sequence-split-merge/.classpath
new file mode 100755
index 0000000..f8ce0d3
--- /dev/null
+++ b/example/sequence-split-merge/.classpath
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+ <classpathentry kind="src" output="target/classes" path="src/main/java">
+ <attributes>
+ <attribute name="optional" value="true"/>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="src" output="target/test-classes" path="src/test/java">
+ <attributes>
+ <attribute name="optional" value="true"/>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
+ <attributes>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6">
+ <attributes>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
+ <attributes>
+ <attribute name="maven.pomderived" value="true"/>
+ </attributes>
+ </classpathentry>
+ <classpathentry kind="output" path="target/classes"/>
+</classpath>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/.gitignore
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.gitignore b/example/sequence-split-merge/.gitignore
new file mode 100755
index 0000000..1dd3331
--- /dev/null
+++ b/example/sequence-split-merge/.gitignore
@@ -0,0 +1,2 @@
+/target/
+/target/
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/.project
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.project b/example/sequence-split-merge/.project
new file mode 100755
index 0000000..4269e67
--- /dev/null
+++ b/example/sequence-split-merge/.project
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+ <name>sequence-split-merge</name>
+ <comment></comment>
+ <projects>
+ </projects>
+ <buildSpec>
+ <buildCommand>
+ <name>org.eclipse.jdt.core.javabuilder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ <buildCommand>
+ <name>org.eclipse.m2e.core.maven2Builder</name>
+ <arguments>
+ </arguments>
+ </buildCommand>
+ </buildSpec>
+ <natures>
+ <nature>org.eclipse.jdt.core.javanature</nature>
+ <nature>org.eclipse.m2e.core.maven2Nature</nature>
+ </natures>
+</projectDescription>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs b/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs
new file mode 100755
index 0000000..8bc0e1c
--- /dev/null
+++ b/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs
@@ -0,0 +1,5 @@
+eclipse.preferences.version=1
+encoding//src/main/java=UTF-8
+encoding//src/test/java=UTF-8
+encoding//src/test/resources=UTF-8
+encoding/<project>=UTF-8
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs b/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs
new file mode 100755
index 0000000..14f521d
--- /dev/null
+++ b/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,5 @@
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.6
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs b/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs
new file mode 100755
index 0000000..14b697b
--- /dev/null
+++ b/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs
@@ -0,0 +1,4 @@
+activeProfiles=
+eclipse.preferences.version=1
+resolveWorkspaceProjects=true
+version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/conf/conf.prop
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/conf/conf.prop b/example/sequence-split-merge/conf/conf.prop
old mode 100644
new mode 100755
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/conf/conf.yaml
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/conf/conf.yaml b/example/sequence-split-merge/conf/conf.yaml
old mode 100644
new mode 100755
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/conf/topology.yaml
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/conf/topology.yaml b/example/sequence-split-merge/conf/topology.yaml
old mode 100644
new mode 100755
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/drpc.sh
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/drpc.sh b/example/sequence-split-merge/drpc.sh
new file mode 100755
index 0000000..1492a51
--- /dev/null
+++ b/example/sequence-split-merge/drpc.sh
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+jstorm jar target/sequence-split-merge-1.1.0-jar-with-dependencies.jar com.alipay.dw.jstorm.example.drpc.ReachTopology reach
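For reference, the script above submits ReachTopology with "reach" as the DRPC function name. A minimal local-mode sketch of the same call path — assuming ReachTopology.construct() is accessible as the diff suggests, and using only the LocalDRPC / LinearDRPCTopologyBuilder classes this commit already imports; the query URL is an illustrative value only:

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
import backtype.storm.drpc.LinearDRPCTopologyBuilder;

import com.alipay.dw.jstorm.example.drpc.ReachTopology;

public class ReachLocalSketch {
    public static void main(String[] args) throws Exception {
        // Build the same linear DRPC topology the commit ships, but bind it to
        // an in-process DRPC server instead of submitting it with `jstorm jar`.
        LinearDRPCTopologyBuilder builder = ReachTopology.construct();

        LocalDRPC drpc = new LocalDRPC();
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("reach-local", new Config(),
                builder.createLocalTopology(drpc));

        // "reach" is the function name drpc.sh passes on the command line; the
        // argument is the URL whose reach should be computed.
        System.out.println(drpc.execute("reach", "foo.com/blog/1"));

        cluster.shutdown();
        drpc.shutdown();
    }
}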
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/pom.xml
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/pom.xml b/example/sequence-split-merge/pom.xml
old mode 100644
new mode 100755
index 71f5154..afba08c
--- a/example/sequence-split-merge/pom.xml
+++ b/example/sequence-split-merge/pom.xml
@@ -3,7 +3,7 @@
<modelVersion>4.0.0</modelVersion>
<groupId>storm</groupId>
<artifactId>sequence-split-merge</artifactId>
- <version>1.0.8</version>
+ <version>1.1.0</version>
<packaging>jar</packaging>
<name>sequence-split-merge</name>
@@ -11,7 +11,7 @@
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
- <jstorm.version>0.9.6.3</jstorm.version>
+ <jstorm.version>2.0.4-SNAPSHOT</jstorm.version>
<storm.version>storm-0.9.2-incubating</storm.version>
</properties>
<repositories>
@@ -34,33 +34,22 @@
<dependency>
<groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client-extension</artifactId>
+ <artifactId>jstorm-core</artifactId>
<version>${jstorm.version}</version>
<scope>provided</scope>
</dependency>
-
- <dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-client</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
- <exclusions>
- <exclusion>
- <groupId>org.slf4j</groupId>
- <artifactId>slf4j-log4j12</artifactId>
- </exclusion>
- </exclusions>
- </dependency>
-
+
+
<dependency>
- <groupId>com.alibaba.jstorm</groupId>
- <artifactId>jstorm-server</artifactId>
- <version>${jstorm.version}</version>
- <scope>provided</scope>
-
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.10</version>
+ <scope>test</scope>
</dependency>
+
+ <!--
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
@@ -72,16 +61,7 @@
<artifactId>log4j-over-slf4j</artifactId>
<version>1.7.10</version>
</dependency>
-
- <dependency>
- <groupId>junit</groupId>
- <artifactId>junit</artifactId>
- <version>4.10</version>
- <scope>test</scope>
- </dependency>
-
-
- <!-- <dependency> <groupId>org.clojure</groupId> <artifactId>clojure</artifactId>
+ <dependency> <groupId>org.clojure</groupId> <artifactId>clojure</artifactId>
<version>1.2.0</version> </dependency> <dependency> <groupId>org.clojure</groupId>
<artifactId>clojure-contrib</artifactId> <version>1.2.0</version> </dependency>
<dependency> <groupId>backtype</groupId> <artifactId>twitter4j-core</artifactId>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/IntervalCheck.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/IntervalCheck.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/IntervalCheck.java
old mode 100644
new mode 100755
index 005fbdf..0b36abf
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/IntervalCheck.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/IntervalCheck.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example;
import java.io.Serializable;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/TpsCounter.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/TpsCounter.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/TpsCounter.java
old mode 100644
new mode 100755
index f9943a5..262ef8c
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/TpsCounter.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/TpsCounter.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example;
import java.io.Serializable;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBatchTopology.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBatchTopology.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBatchTopology.java
old mode 100644
new mode 100755
index e4cda48..d225ad0
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBatchTopology.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBatchTopology.java
@@ -1,54 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.batch;
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.util.Map;
-
-import org.yaml.snakeyaml.Yaml;
-
-import backtype.storm.Config;
-import backtype.storm.LocalCluster;
-import backtype.storm.StormSubmitter;
-import backtype.storm.generated.AlreadyAliveException;
-import backtype.storm.generated.InvalidTopologyException;
-import backtype.storm.generated.TopologyAssignException;
-import backtype.storm.topology.BoltDeclarer;
+import java.util.Map;
+
+import com.alibaba.jstorm.batch.BatchTopologyBuilder;
+import com.alibaba.jstorm.cluster.StormConfig;
+import com.alibaba.jstorm.utils.JStormUtils;
+import com.alibaba.jstorm.utils.LoadConf;
+
+import backtype.storm.LocalCluster;
+import backtype.storm.StormSubmitter;
+import backtype.storm.generated.AlreadyAliveException;
+import backtype.storm.generated.InvalidTopologyException;
+import backtype.storm.generated.TopologyAssignException;
+import backtype.storm.topology.BoltDeclarer;
import backtype.storm.topology.TopologyBuilder;
-import com.alibaba.jstorm.batch.BatchTopologyBuilder;
-import com.alibaba.jstorm.cluster.StormConfig;
-import com.alibaba.jstorm.utils.JStormUtils;
-
public class SimpleBatchTopology {
- private static String topologyName;
+ private static String topologyName = "Batch";
private static Map conf;
- private static void LoadYaml(String confPath) {
-
- Yaml yaml = new Yaml();
-
- try {
- InputStream stream = new FileInputStream(confPath);
-
- conf = (Map) yaml.load(stream);
- if (conf == null || conf.isEmpty() == true) {
- throw new RuntimeException("Failed to read config file");
- }
-
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + confPath);
- throw new RuntimeException("No config file");
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException("Failed to read config file");
- }
-
- topologyName = (String) conf.get(Config.TOPOLOGY_NAME);
- return;
- }
public static TopologyBuilder SetBuilder() {
BatchTopologyBuilder topologyBuilder = new BatchTopologyBuilder(
@@ -72,7 +61,7 @@ public class SimpleBatchTopology {
LocalCluster cluster = new LocalCluster();
cluster.submitTopology(topologyName, conf, builder.createTopology());
- Thread.sleep(600000);
+ Thread.sleep(60000);
cluster.shutdown();
}
@@ -94,7 +83,8 @@ public class SimpleBatchTopology {
System.exit(-1);
}
- LoadYaml(args[0]);
+ conf = LoadConf.LoadYaml(args[0]);
+
boolean isLocal = StormConfig.local_mode(conf);
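The hunk above swaps the hand-rolled snakeyaml loader for com.alibaba.jstorm.utils.LoadConf.LoadYaml and then branches on StormConfig.local_mode(conf). A condensed sketch of that submit flow, using only calls visible in this diff (LoadYaml returning the parsed Map, SetBuilder() building the batch topology, and the "Batch" topology name now hard-coded above):

import java.util.Map;

import backtype.storm.LocalCluster;
import backtype.storm.StormSubmitter;
import backtype.storm.topology.TopologyBuilder;

import com.alibaba.jstorm.cluster.StormConfig;
import com.alibaba.jstorm.utils.LoadConf;
import com.alipay.dw.jstorm.example.batch.SimpleBatchTopology;

public class BatchSubmitSketch {
    public static void main(String[] args) throws Exception {
        // Parse the YAML file passed on the command line into the topology conf.
        Map conf = LoadConf.LoadYaml(args[0]);
        TopologyBuilder builder = SimpleBatchTopology.SetBuilder();

        if (StormConfig.local_mode(conf)) {
            // Local mode: run in-process, let it work for a minute, then stop.
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("Batch", conf, builder.createTopology());
            Thread.sleep(60000);
            cluster.shutdown();
        } else {
            // Cluster mode: hand the topology to nimbus.
            StormSubmitter.submitTopology("Batch", conf, builder.createTopology());
        }
    }
}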
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBolt.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBolt.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBolt.java
old mode 100644
new mode 100755
index 6587076..67259d9
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBolt.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleBolt.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.batch;
import java.util.Map;
@@ -102,7 +119,7 @@ public class SimpleBolt implements IBasicBolt, ICommitter {
public void revert(BatchId id, byte[] commitResult) {
LOG.info("Receive BatchId " + id);
- BatchId failedId = (BatchId)Utils.deserialize(commitResult);
+ BatchId failedId = (BatchId)Utils.javaDeserialize(commitResult);
if (failedId.equals(id) == false) {
LOG.info("Deserialized error " + id);
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleSpout.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleSpout.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleSpout.java
old mode 100644
new mode 100755
index 4fdb5a3..3b16d80
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleSpout.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/batch/SimpleSpout.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.batch;
import java.util.Map;
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/ReachTopology.java
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/ReachTopology.java b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/ReachTopology.java
old mode 100644
new mode 100755
index 8e03837..f7d39f4
--- a/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/ReachTopology.java
+++ b/example/sequence-split-merge/src/main/java/com/alipay/dw/jstorm/example/drpc/ReachTopology.java
@@ -1,3 +1,20 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
package com.alipay.dw.jstorm.example.drpc;
import java.util.Arrays;
@@ -7,6 +24,9 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
+import com.alibaba.jstorm.utils.JStormUtils;
+import com.alibaba.jstorm.utils.LoadConf;
+
import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.LocalDRPC;
@@ -22,8 +42,6 @@ import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;
-import com.alibaba.jstorm.utils.JStormUtils;
-
/**
* This is a good example of doing complex Distributed RPC on top of Storm. This
* program creates a topology that can compute the reach for any URL on Twitter
@@ -166,14 +184,27 @@ public class ReachTopology {
return builder;
}
+
+
+
public static void main(String[] args) throws Exception {
LinearDRPCTopologyBuilder builder = construct();
-
-
+
Config conf = new Config();
conf.setNumWorkers(6);
- if (args.length == 0) {
+ if (args.length != 0) {
+
+ try {
+ Map yamlConf = LoadConf.LoadYaml(args[0]);
+ if (yamlConf != null) {
+ conf.putAll(yamlConf);
+ }
+            } catch (Exception e) {
+                System.out.println("Input " + args[0] + " isn't a valid yaml file");
+ }
+
+
StormSubmitter.submitTopology(TOPOLOGY_NAME, conf, builder.createRemoteTopology());
}else {
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/multilang/SpoutMsg.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/multilang/SpoutMsg.java b/jstorm-client/src/main/java/backtype/storm/multilang/SpoutMsg.java
deleted file mode 100644
index cb1b108..0000000
--- a/jstorm-client/src/main/java/backtype/storm/multilang/SpoutMsg.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package backtype.storm.multilang;
-
-/**
- * SpoutMsg is an object that represents the data sent from a shell spout to a
- * process that implements a multi-language spout. The SpoutMsg is used to send
- * a "next", "ack" or "fail" message to a spout.
- *
- * <p>
- * Spout messages are objects sent to the ISerializer interface, for
- * serialization according to the wire protocol implemented by the serializer.
- * The SpoutMsg class allows for a decoupling between the serialized
- * representation of the data and the data itself.
- * </p>
- */
-public class SpoutMsg {
- private String command;
- private Object id;
-
- public String getCommand() {
- return command;
- }
-
- public void setCommand(String command) {
- this.command = command;
- }
-
- public Object getId() {
- return id;
- }
-
- public void setId(Object id) {
- this.id = id;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/nimbus/DefaultTopologyValidator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/nimbus/DefaultTopologyValidator.java b/jstorm-client/src/main/java/backtype/storm/nimbus/DefaultTopologyValidator.java
deleted file mode 100644
index c84be4e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/nimbus/DefaultTopologyValidator.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package backtype.storm.nimbus;
-
-import backtype.storm.generated.InvalidTopologyException;
-import backtype.storm.generated.StormTopology;
-import java.util.Map;
-
-public class DefaultTopologyValidator implements ITopologyValidator {
- @Override
- public void prepare(Map StormConf) {
- }
-
- @Override
- public void validate(String topologyName, Map topologyConf,
- StormTopology topology) throws InvalidTopologyException {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/nimbus/ITopologyValidator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/nimbus/ITopologyValidator.java b/jstorm-client/src/main/java/backtype/storm/nimbus/ITopologyValidator.java
deleted file mode 100644
index f734670..0000000
--- a/jstorm-client/src/main/java/backtype/storm/nimbus/ITopologyValidator.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package backtype.storm.nimbus;
-
-import backtype.storm.generated.InvalidTopologyException;
-import backtype.storm.generated.StormTopology;
-import java.util.Map;
-
-public interface ITopologyValidator {
- void prepare(Map StormConf);
-
- void validate(String topologyName, Map topologyConf, StormTopology topology)
- throws InvalidTopologyException;
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/planner/CompoundSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/planner/CompoundSpout.java b/jstorm-client/src/main/java/backtype/storm/planner/CompoundSpout.java
deleted file mode 100644
index 7e50406..0000000
--- a/jstorm-client/src/main/java/backtype/storm/planner/CompoundSpout.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.planner;
-
-public class CompoundSpout
-// implements ISpout
-{
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/planner/CompoundTask.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/planner/CompoundTask.java b/jstorm-client/src/main/java/backtype/storm/planner/CompoundTask.java
deleted file mode 100644
index 71cdb15..0000000
--- a/jstorm-client/src/main/java/backtype/storm/planner/CompoundTask.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.planner;
-
-public class CompoundTask
-// implements IBolt
-{
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/planner/TaskBundle.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/planner/TaskBundle.java b/jstorm-client/src/main/java/backtype/storm/planner/TaskBundle.java
deleted file mode 100644
index c8dadd3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/planner/TaskBundle.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.planner;
-
-import backtype.storm.task.IBolt;
-import java.io.Serializable;
-
-public class TaskBundle implements Serializable {
- public IBolt task;
- public int componentId;
-
- public TaskBundle(IBolt task, int componentId) {
- this.task = task;
- this.componentId = componentId;
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/Cluster.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/Cluster.java b/jstorm-client/src/main/java/backtype/storm/scheduler/Cluster.java
deleted file mode 100644
index b53f02d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/Cluster.java
+++ /dev/null
@@ -1,448 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-public class Cluster {
-
- /**
- * key: supervisor id, value: supervisor details
- */
- private Map<String, SupervisorDetails> supervisors;
- /**
- * key: topologyId, value: topology's current assignments.
- */
- private Map<String, SchedulerAssignmentImpl> assignments;
-
- /**
- * a map from hostname to supervisor id.
- */
- private Map<String, List<String>> hostToId;
-
- private Set<String> blackListedHosts = new HashSet<String>();
- private INimbus inimbus;
-
- public Cluster(INimbus nimbus, Map<String, SupervisorDetails> supervisors,
- Map<String, SchedulerAssignmentImpl> assignments) {
- this.inimbus = nimbus;
- this.supervisors = new HashMap<String, SupervisorDetails>(
- supervisors.size());
- this.supervisors.putAll(supervisors);
- this.assignments = new HashMap<String, SchedulerAssignmentImpl>(
- assignments.size());
- this.assignments.putAll(assignments);
- this.hostToId = new HashMap<String, List<String>>();
- for (String nodeId : supervisors.keySet()) {
- SupervisorDetails supervisor = supervisors.get(nodeId);
- String host = supervisor.getHost();
- if (!this.hostToId.containsKey(host)) {
- this.hostToId.put(host, new ArrayList<String>());
- }
- this.hostToId.get(host).add(nodeId);
- }
- }
-
- public void setBlacklistedHosts(Set<String> hosts) {
- blackListedHosts = hosts;
- }
-
- public Set<String> getBlacklistedHosts() {
- return blackListedHosts;
- }
-
- public void blacklistHost(String host) {
- // this is so it plays well with setting blackListedHosts to an
- // immutable list
- if (blackListedHosts == null)
- blackListedHosts = new HashSet<String>();
- if (!(blackListedHosts instanceof HashSet))
- blackListedHosts = new HashSet<String>(blackListedHosts);
- blackListedHosts.add(host);
- }
-
- public boolean isBlackListed(String supervisorId) {
- return blackListedHosts != null
- && blackListedHosts.contains(getHost(supervisorId));
- }
-
- public boolean isBlacklistedHost(String host) {
- return blackListedHosts != null && blackListedHosts.contains(host);
- }
-
- public String getHost(String supervisorId) {
- return inimbus.getHostName(supervisors, supervisorId);
- }
-
- /**
- * Gets all the topologies which needs scheduling.
- *
- * @param topologies
- * @return
- */
- public List<TopologyDetails> needsSchedulingTopologies(Topologies topologies) {
- List<TopologyDetails> ret = new ArrayList<TopologyDetails>();
- for (TopologyDetails topology : topologies.getTopologies()) {
- if (needsScheduling(topology)) {
- ret.add(topology);
- }
- }
-
- return ret;
- }
-
- /**
- * Does the topology need scheduling?
- *
- * A topology needs scheduling if one of the following conditions holds:
- * <ul>
- * <li>Although the topology is assigned slots, but is squeezed. i.e. the
- * topology is assigned less slots than desired.</li>
- * <li>There are unassigned executors in this topology</li>
- * </ul>
- */
- public boolean needsScheduling(TopologyDetails topology) {
- int desiredNumWorkers = topology.getNumWorkers();
- int assignedNumWorkers = this.getAssignedNumWorkers(topology);
-
- if (desiredNumWorkers > assignedNumWorkers) {
- return true;
- }
-
- return this.getUnassignedExecutors(topology).size() > 0;
- }
-
- /**
- * Gets a executor -> component-id map which needs scheduling in this
- * topology.
- *
- * @param topology
- * @return
- */
- public Map<ExecutorDetails, String> getNeedsSchedulingExecutorToComponents(
- TopologyDetails topology) {
- Collection<ExecutorDetails> allExecutors = new HashSet(
- topology.getExecutors());
-
- SchedulerAssignment assignment = this.assignments.get(topology.getId());
- if (assignment != null) {
- Collection<ExecutorDetails> assignedExecutors = assignment
- .getExecutors();
- allExecutors.removeAll(assignedExecutors);
- }
-
- return topology.selectExecutorToComponent(allExecutors);
- }
-
- /**
- * Gets a component-id -> executors map which needs scheduling in this
- * topology.
- *
- * @param topology
- * @return
- */
- public Map<String, List<ExecutorDetails>> getNeedsSchedulingComponentToExecutors(
- TopologyDetails topology) {
- Map<ExecutorDetails, String> executorToComponents = this
- .getNeedsSchedulingExecutorToComponents(topology);
- Map<String, List<ExecutorDetails>> componentToExecutors = new HashMap<String, List<ExecutorDetails>>();
- for (ExecutorDetails executor : executorToComponents.keySet()) {
- String component = executorToComponents.get(executor);
- if (!componentToExecutors.containsKey(component)) {
- componentToExecutors.put(component,
- new ArrayList<ExecutorDetails>());
- }
-
- componentToExecutors.get(component).add(executor);
- }
-
- return componentToExecutors;
- }
-
- /**
- * Get all the used ports of this supervisor.
- *
- * @param cluster
- * @return
- */
- public Set<Integer> getUsedPorts(SupervisorDetails supervisor) {
- Map<String, SchedulerAssignment> assignments = this.getAssignments();
- Set<Integer> usedPorts = new HashSet<Integer>();
-
- for (SchedulerAssignment assignment : assignments.values()) {
- for (WorkerSlot slot : assignment.getExecutorToSlot().values()) {
- if (slot.getNodeId().equals(supervisor.getId())) {
- usedPorts.add(slot.getPort());
- }
- }
- }
-
- return usedPorts;
- }
-
- /**
- * Return the available ports of this supervisor.
- *
- * @param cluster
- * @return
- */
- public Set<Integer> getAvailablePorts(SupervisorDetails supervisor) {
- Set<Integer> usedPorts = this.getUsedPorts(supervisor);
-
- Set<Integer> ret = new HashSet();
- ret.addAll(getAssignablePorts(supervisor));
- ret.removeAll(usedPorts);
-
- return ret;
- }
-
- public Set<Integer> getAssignablePorts(SupervisorDetails supervisor) {
- if (isBlackListed(supervisor.id))
- return new HashSet();
- return supervisor.allPorts;
- }
-
- /**
- * Return all the available slots on this supervisor.
- *
- * @param cluster
- * @return
- */
- public List<WorkerSlot> getAvailableSlots(SupervisorDetails supervisor) {
- Set<Integer> ports = this.getAvailablePorts(supervisor);
- List<WorkerSlot> slots = new ArrayList<WorkerSlot>(ports.size());
-
- for (Integer port : ports) {
- slots.add(new WorkerSlot(supervisor.getId(), port));
- }
-
- return slots;
- }
-
- public List<WorkerSlot> getAssignableSlots(SupervisorDetails supervisor) {
- Set<Integer> ports = this.getAssignablePorts(supervisor);
- List<WorkerSlot> slots = new ArrayList<WorkerSlot>(ports.size());
-
- for (Integer port : ports) {
- slots.add(new WorkerSlot(supervisor.getId(), port));
- }
-
- return slots;
- }
-
- /**
- * get the unassigned executors of the topology.
- */
- public Collection<ExecutorDetails> getUnassignedExecutors(
- TopologyDetails topology) {
- if (topology == null) {
- return new ArrayList<ExecutorDetails>(0);
- }
-
- Collection<ExecutorDetails> ret = new HashSet(topology.getExecutors());
-
- SchedulerAssignment assignment = this.getAssignmentById(topology
- .getId());
- if (assignment != null) {
- Set<ExecutorDetails> assignedExecutors = assignment.getExecutors();
- ret.removeAll(assignedExecutors);
- }
-
- return ret;
- }
-
- /**
- * Gets the number of workers assigned to this topology.
- *
- * @param topology
- * @return
- */
- public int getAssignedNumWorkers(TopologyDetails topology) {
- if (topology == null) {
- return 0;
- }
- SchedulerAssignment assignment = this.getAssignmentById(topology
- .getId());
- if (assignment == null) {
- return 0;
- }
-
- Set<WorkerSlot> slots = new HashSet<WorkerSlot>();
- slots.addAll(assignment.getExecutorToSlot().values());
-
- return slots.size();
- }
-
- /**
- * Assign the slot to the executors for this topology.
- *
- * @throws RuntimeException
- * if the specified slot is already occupied.
- */
- public void assign(WorkerSlot slot, String topologyId,
- Collection<ExecutorDetails> executors) {
- if (this.isSlotOccupied(slot)) {
- throw new RuntimeException("slot: [" + slot.getNodeId() + ", "
- + slot.getPort() + "] is already occupied.");
- }
-
- SchedulerAssignmentImpl assignment = (SchedulerAssignmentImpl) this
- .getAssignmentById(topologyId);
- if (assignment == null) {
- assignment = new SchedulerAssignmentImpl(topologyId,
- new HashMap<ExecutorDetails, WorkerSlot>());
- this.assignments.put(topologyId, assignment);
- } else {
- for (ExecutorDetails executor : executors) {
- if (assignment.isExecutorAssigned(executor)) {
- throw new RuntimeException(
- "the executor is already assigned, you should unassign it before assign it to another slot.");
- }
- }
- }
-
- assignment.assign(slot, executors);
- }
-
- /**
- * Gets all the available slots in the cluster.
- *
- * @return
- */
- public List<WorkerSlot> getAvailableSlots() {
- List<WorkerSlot> slots = new ArrayList<WorkerSlot>();
- for (SupervisorDetails supervisor : this.supervisors.values()) {
- slots.addAll(this.getAvailableSlots(supervisor));
- }
-
- return slots;
- }
-
- public List<WorkerSlot> getAssignableSlots() {
- List<WorkerSlot> slots = new ArrayList<WorkerSlot>();
- for (SupervisorDetails supervisor : this.supervisors.values()) {
- slots.addAll(this.getAssignableSlots(supervisor));
- }
-
- return slots;
- }
-
- /**
- * Free the specified slot.
- *
- * @param slot
- */
- public void freeSlot(WorkerSlot slot) {
- // remove the slot from the existing assignments
- for (SchedulerAssignmentImpl assignment : this.assignments.values()) {
- if (assignment.isSlotOccupied(slot)) {
- assignment.unassignBySlot(slot);
- }
- }
- }
-
- /**
- * free the slots.
- *
- * @param slots
- */
- public void freeSlots(Collection<WorkerSlot> slots) {
- if (slots != null) {
- for (WorkerSlot slot : slots) {
- this.freeSlot(slot);
- }
- }
- }
-
- /**
- * Checks whether the specified slot is occupied.
- *
- * @param slot
- * the slot to be checked.
- * @return
- */
- public boolean isSlotOccupied(WorkerSlot slot) {
- for (SchedulerAssignment assignment : this.assignments.values()) {
- if (assignment.isSlotOccupied(slot)) {
- return true;
- }
- }
-
- return false;
- }
-
- /**
- * get the current assignment for the topology.
- */
- public SchedulerAssignment getAssignmentById(String topologyId) {
- if (this.assignments.containsKey(topologyId)) {
- return this.assignments.get(topologyId);
- }
-
- return null;
- }
-
- /**
- * Get a specific supervisor with the <code>nodeId</code>
- */
- public SupervisorDetails getSupervisorById(String nodeId) {
- if (this.supervisors.containsKey(nodeId)) {
- return this.supervisors.get(nodeId);
- }
-
- return null;
- }
-
- public Collection<WorkerSlot> getUsedSlots() {
- Set<WorkerSlot> ret = new HashSet();
- for (SchedulerAssignmentImpl s : assignments.values()) {
- ret.addAll(s.getExecutorToSlot().values());
- }
- return ret;
- }
-
- /**
- * Get all the supervisors on the specified <code>host</code>.
- *
- * @param host
- * hostname of the supervisor
- * @return the list of <code>SupervisorDetails</code> objects on that host.
- */
- public List<SupervisorDetails> getSupervisorsByHost(String host) {
- List<String> nodeIds = this.hostToId.get(host);
- List<SupervisorDetails> ret = new ArrayList<SupervisorDetails>();
-
- if (nodeIds != null) {
- for (String nodeId : nodeIds) {
- ret.add(this.getSupervisorById(nodeId));
- }
- }
-
- return ret;
- }
-
- /**
- * Get all the assignments.
- */
- public Map<String, SchedulerAssignment> getAssignments() {
- Map<String, SchedulerAssignment> ret = new HashMap<String, SchedulerAssignment>(
- this.assignments.size());
-
- for (String topologyId : this.assignments.keySet()) {
- ret.put(topologyId, this.assignments.get(topologyId));
- }
-
- return ret;
- }
-
- /**
- * Get all the supervisors.
- */
- public Map<String, SupervisorDetails> getSupervisors() {
- return this.supervisors;
- }
-}
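
For reference, the assignment helpers in the class above are meant to be used together:
assign() throws if the target slot is already occupied, so callers typically check
isSlotOccupied() or call freeSlot() first. A minimal sketch of that pattern (the class
and method names are illustrative, not part of this patch):

    import java.util.Collection;

    import backtype.storm.scheduler.Cluster;
    import backtype.storm.scheduler.ExecutorDetails;
    import backtype.storm.scheduler.WorkerSlot;

    public class SlotAssignmentSketch {
        // Assign executors to a slot, releasing the slot first if it is already taken,
        // so that Cluster.assign() does not throw its "already occupied" RuntimeException.
        static void assignSafely(Cluster cluster, WorkerSlot slot, String topologyId,
                Collection<ExecutorDetails> executors) {
            if (cluster.isSlotOccupied(slot)) {
                cluster.freeSlot(slot); // removes the slot from every existing assignment
            }
            cluster.assign(slot, topologyId, executors);
        }
    }
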
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/ExecutorDetails.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/ExecutorDetails.java b/jstorm-client/src/main/java/backtype/storm/scheduler/ExecutorDetails.java
deleted file mode 100644
index fb04d84..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/ExecutorDetails.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package backtype.storm.scheduler;
-
-public class ExecutorDetails {
- int startTask;
- int endTask;
-
- public ExecutorDetails(int startTask, int endTask) {
- this.startTask = startTask;
- this.endTask = endTask;
- }
-
- public int getStartTask() {
- return startTask;
- }
-
- public int getEndTask() {
- return endTask;
- }
-
- public boolean equals(Object other) {
- if (other == null || !(other instanceof ExecutorDetails)) {
- return false;
- }
-
- ExecutorDetails executor = (ExecutorDetails) other;
- return (this.startTask == executor.startTask)
- && (this.endTask == executor.endTask);
- }
-
- public int hashCode() {
- return this.startTask + 13 * this.endTask;
- }
-
- @Override
- public String toString() {
- return "[" + this.startTask + ", " + this.endTask + "]";
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/INimbus.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/INimbus.java b/jstorm-client/src/main/java/backtype/storm/scheduler/INimbus.java
deleted file mode 100644
index 2d81b98..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/INimbus.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Collection;
-import java.util.Map;
-import java.util.Set;
-
-public interface INimbus {
- void prepare(Map stormConf, String schedulerLocalDir);
-
- /**
- * Returns all slots that are available for the next round of scheduling. A
- * slot is available for scheduling if it is free and can be assigned to, or
- * if it is used and can be reassigned.
- */
- Collection<WorkerSlot> allSlotsAvailableForScheduling(
- Collection<SupervisorDetails> existingSupervisors,
- Topologies topologies, Set<String> topologiesMissingAssignments);
-
- // this is called after the assignment is changed in ZK
- void assignSlots(Topologies topologies,
- Map<String, Collection<WorkerSlot>> newSlotsByTopologyId);
-
- // map from node id to supervisor details
- String getHostName(Map<String, SupervisorDetails> existingSupervisors,
- String nodeId);
-
- IScheduler getForcedScheduler();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/IScheduler.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/IScheduler.java b/jstorm-client/src/main/java/backtype/storm/scheduler/IScheduler.java
deleted file mode 100644
index fb68499..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/IScheduler.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Map;
-
-public interface IScheduler {
-
- void prepare(Map conf);
-
- /**
- * Set assignments for the topologies which need scheduling. The new
- * assignments are available through <code>cluster.getAssignments()</code>
- *
- * @param topologies
- * all the topologies in the cluster, some of which need scheduling.
- * The Topologies object here only contains static information about
- * topologies. Information such as assignments and slots is in the
- * <code>cluster</code> object.
- * @param cluster
- * the cluster these topologies are running in.
- * <code>cluster</code> contains everything the user needs to develop
- * new scheduling logic, e.g. supervisor information,
- * available slots, and current assignments for all the topologies.
- * Users can set new assignments for topologies using
- * <code>cluster.setAssignmentById</code>
- */
- void schedule(Topologies topologies, Cluster cluster);
-}
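
To make the schedule() contract above concrete, here is a hedged sketch of a trivial
IScheduler (a hypothetical class, not part of this patch) that places each topology's
unassigned executors on the first free slot using the Cluster API shown earlier; a real
scheduler would spread executors across slots and hosts:

    package backtype.storm.scheduler;

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    public class FirstFitScheduler implements IScheduler {

        public void prepare(Map conf) {
            // nothing to configure in this sketch
        }

        public void schedule(Topologies topologies, Cluster cluster) {
            for (TopologyDetails topology : topologies.getTopologies()) {
                List<ExecutorDetails> unassigned =
                        new ArrayList<ExecutorDetails>(cluster.getUnassignedExecutors(topology));
                List<WorkerSlot> slots = cluster.getAvailableSlots();
                if (unassigned.isEmpty() || slots.isEmpty()) {
                    continue;
                }
                // naive placement: everything on the first available slot
                cluster.assign(slots.get(0), topology.getId(), unassigned);
            }
        }
    }
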
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/ISupervisor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/ISupervisor.java b/jstorm-client/src/main/java/backtype/storm/scheduler/ISupervisor.java
deleted file mode 100644
index 30548c8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/ISupervisor.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Map;
-import java.util.Collection;
-
-public interface ISupervisor {
- void prepare(Map stormConf, String schedulerLocalDir);
-
- // for mesos, this is {hostname}-{topologyid}
- /**
- * The id used for writing metadata into ZK.
- */
- String getSupervisorId();
-
- /**
- * The id used in assignments. This combined with confirmAssigned decides
- * what this supervisor is responsible for. The combination of this and
- * getSupervisorId allows Nimbus to assign to a single machine and have
- * multiple supervisors on that machine execute the assignment. This is
- * important for achieving resource isolation.
- */
- String getAssignmentId();
-
- Object getMetadata();
-
- boolean confirmAssigned(int port);
-
- // calls this before actually killing the worker locally...
- // sends a "task finished" update
- void killedWorker(int port);
-
- void assigned(Collection<Integer> ports);
-}
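
As an illustration of the getSupervisorId()/getAssignmentId() split described above, a
machine that runs a single supervisor can return the same generated id for both; everything
below (class name, UUID id, no-op bodies) is a hypothetical sketch, not code from this patch:

    package backtype.storm.scheduler;

    import java.util.Collection;
    import java.util.Map;
    import java.util.UUID;

    public class StandaloneSupervisor implements ISupervisor {
        // one machine == one supervisor, so a single id serves both purposes
        private final String id = UUID.randomUUID().toString();

        public void prepare(Map stormConf, String schedulerLocalDir) {
        }

        public String getSupervisorId() {
            return id;
        }

        public String getAssignmentId() {
            return id;
        }

        public Object getMetadata() {
            return null;
        }

        public boolean confirmAssigned(int port) {
            return true; // accept every port Nimbus assigns
        }

        public void killedWorker(int port) {
        }

        public void assigned(Collection<Integer> ports) {
        }
    }
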
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignment.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignment.java b/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignment.java
deleted file mode 100644
index bf3b2cb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignment.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Map;
-import java.util.Set;
-
-public interface SchedulerAssignment {
- /**
- * Is this slot occupied by this assignment?
- *
- * @param slot
- * @return
- */
- public boolean isSlotOccupied(WorkerSlot slot);
-
- /**
- * is the executor assigned?
- *
- * @param executor
- * @return
- */
- public boolean isExecutorAssigned(ExecutorDetails executor);
-
- /**
- * get the topology-id this assignment is for.
- *
- * @return
- */
- public String getTopologyId();
-
- /**
- * get the executor -> slot map.
- *
- * @return
- */
- public Map<ExecutorDetails, WorkerSlot> getExecutorToSlot();
-
- /**
- * Return the executors covered by this assignment
- *
- * @return
- */
- public Set<ExecutorDetails> getExecutors();
-
- public Set<WorkerSlot> getSlots();
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java b/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java
deleted file mode 100644
index 6514863..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/SchedulerAssignmentImpl.java
+++ /dev/null
@@ -1,98 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-//TODO: improve this by maintaining slot -> executors as well for more efficient operations
-public class SchedulerAssignmentImpl implements SchedulerAssignment {
- /**
- * topology-id this assignment is for.
- */
- String topologyId;
- /**
- * assignment detail, a mapping from executor to <code>WorkerSlot</code>
- */
- Map<ExecutorDetails, WorkerSlot> executorToSlot;
-
- public SchedulerAssignmentImpl(String topologyId,
- Map<ExecutorDetails, WorkerSlot> executorToSlots) {
- this.topologyId = topologyId;
- this.executorToSlot = new HashMap<ExecutorDetails, WorkerSlot>(0);
- if (executorToSlots != null) {
- this.executorToSlot.putAll(executorToSlots);
- }
- }
-
- @Override
- public Set<WorkerSlot> getSlots() {
- return new HashSet(executorToSlot.values());
- }
-
- /**
- * Assign the slot to executors.
- *
- * @param slot
- * @param executors
- */
- public void assign(WorkerSlot slot, Collection<ExecutorDetails> executors) {
- for (ExecutorDetails executor : executors) {
- this.executorToSlot.put(executor, slot);
- }
- }
-
- /**
- * Release the slot occupied by this assignment.
- *
- * @param slot
- */
- public void unassignBySlot(WorkerSlot slot) {
- List<ExecutorDetails> executors = new ArrayList<ExecutorDetails>();
- for (ExecutorDetails executor : this.executorToSlot.keySet()) {
- WorkerSlot ws = this.executorToSlot.get(executor);
- if (ws.equals(slot)) {
- executors.add(executor);
- }
- }
-
- // remove
- for (ExecutorDetails executor : executors) {
- this.executorToSlot.remove(executor);
- }
- }
-
- /**
- * Is this slot occupied by this assignment?
- *
- * @param slot
- * @return
- */
- public boolean isSlotOccupied(WorkerSlot slot) {
- return this.executorToSlot.containsValue(slot);
- }
-
- public boolean isExecutorAssigned(ExecutorDetails executor) {
- return this.executorToSlot.containsKey(executor);
- }
-
- public String getTopologyId() {
- return this.topologyId;
- }
-
- public Map<ExecutorDetails, WorkerSlot> getExecutorToSlot() {
- return this.executorToSlot;
- }
-
- /**
- * Return the executors covered by this assignment
- *
- * @return
- */
- public Set<ExecutorDetails> getExecutors() {
- return this.executorToSlot.keySet();
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/SupervisorDetails.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/SupervisorDetails.java b/jstorm-client/src/main/java/backtype/storm/scheduler/SupervisorDetails.java
deleted file mode 100644
index adb81cf..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/SupervisorDetails.java
+++ /dev/null
@@ -1,73 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Collection;
-import java.util.HashSet;
-import java.util.Set;
-
-public class SupervisorDetails {
-
- String id;
- /**
- * hostname of this supervisor
- */
- String host;
- Object meta;
- /**
- * scheduler meta data configured for this supervisor
- */
- Object schedulerMeta;
- /**
- * all the ports of the supervisor
- */
- Set<Integer> allPorts;
-
- public SupervisorDetails(String id, Object meta) {
- this.id = id;
- this.meta = meta;
- allPorts = new HashSet();
- }
-
- public SupervisorDetails(String id, Object meta, Collection<Number> allPorts) {
- this.id = id;
- this.meta = meta;
- setAllPorts(allPorts);
- }
-
- public SupervisorDetails(String id, String host, Object schedulerMeta,
- Collection<Number> allPorts) {
- this.id = id;
- this.host = host;
- this.schedulerMeta = schedulerMeta;
-
- setAllPorts(allPorts);
- }
-
- private void setAllPorts(Collection<Number> allPorts) {
- this.allPorts = new HashSet<Integer>();
- if (allPorts != null) {
- for (Number n : allPorts) {
- this.allPorts.add(n.intValue());
- }
- }
- }
-
- public String getId() {
- return id;
- }
-
- public String getHost() {
- return host;
- }
-
- public Object getMeta() {
- return meta;
- }
-
- public Set<Integer> getAllPorts() {
- return allPorts;
- }
-
- public Object getSchedulerMeta() {
- return this.schedulerMeta;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/Topologies.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/Topologies.java b/jstorm-client/src/main/java/backtype/storm/scheduler/Topologies.java
deleted file mode 100644
index 883213c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/Topologies.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-public class Topologies {
- Map<String, TopologyDetails> topologies;
- Map<String, String> nameToId;
-
- public Topologies(Map<String, TopologyDetails> topologies) {
- if (topologies == null)
- topologies = new HashMap();
- this.topologies = new HashMap<String, TopologyDetails>(
- topologies.size());
- this.topologies.putAll(topologies);
- this.nameToId = new HashMap<String, String>(topologies.size());
-
- for (String topologyId : topologies.keySet()) {
- TopologyDetails topology = topologies.get(topologyId);
- this.nameToId.put(topology.getName(), topologyId);
- }
- }
-
- public TopologyDetails getById(String topologyId) {
- return this.topologies.get(topologyId);
- }
-
- public TopologyDetails getByName(String topologyName) {
- String topologyId = this.nameToId.get(topologyName);
-
- if (topologyId == null) {
- return null;
- } else {
- return this.getById(topologyId);
- }
- }
-
- public Collection<TopologyDetails> getTopologies() {
- return this.topologies.values();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/TopologyDetails.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/TopologyDetails.java b/jstorm-client/src/main/java/backtype/storm/scheduler/TopologyDetails.java
deleted file mode 100644
index fe9138c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/TopologyDetails.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.Map;
-
-import backtype.storm.Config;
-import backtype.storm.generated.StormTopology;
-
-public class TopologyDetails {
- String topologyId;
- Map topologyConf;
- StormTopology topology;
- Map<ExecutorDetails, String> executorToComponent;
- int numWorkers;
-
- public TopologyDetails(String topologyId, Map topologyConf,
- StormTopology topology, int numWorkers) {
- this.topologyId = topologyId;
- this.topologyConf = topologyConf;
- this.topology = topology;
- this.numWorkers = numWorkers;
- }
-
- public TopologyDetails(String topologyId, Map topologyConf,
- StormTopology topology, int numWorkers,
- Map<ExecutorDetails, String> executorToComponents) {
- this(topologyId, topologyConf, topology, numWorkers);
- this.executorToComponent = new HashMap<ExecutorDetails, String>(0);
- if (executorToComponents != null) {
- this.executorToComponent.putAll(executorToComponents);
- }
- }
-
- public String getId() {
- return topologyId;
- }
-
- public String getName() {
- return (String) this.topologyConf.get(Config.TOPOLOGY_NAME);
- }
-
- public Map getConf() {
- return topologyConf;
- }
-
- public int getNumWorkers() {
- return numWorkers;
- }
-
- public StormTopology getTopology() {
- return topology;
- }
-
- public Map<ExecutorDetails, String> getExecutorToComponent() {
- return this.executorToComponent;
- }
-
- public Map<ExecutorDetails, String> selectExecutorToComponent(
- Collection<ExecutorDetails> executors) {
- Map<ExecutorDetails, String> ret = new HashMap<ExecutorDetails, String>(
- executors.size());
- for (ExecutorDetails executor : executors) {
- String compId = this.executorToComponent.get(executor);
- if (compId != null) {
- ret.put(executor, compId);
- }
- }
-
- return ret;
- }
-
- public Collection<ExecutorDetails> getExecutors() {
- return this.executorToComponent.keySet();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/scheduler/WorkerSlot.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/scheduler/WorkerSlot.java b/jstorm-client/src/main/java/backtype/storm/scheduler/WorkerSlot.java
deleted file mode 100644
index a89abab..0000000
--- a/jstorm-client/src/main/java/backtype/storm/scheduler/WorkerSlot.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package backtype.storm.scheduler;
-
-import java.io.Serializable;
-
-public class WorkerSlot implements Comparable<WorkerSlot>, Serializable {
-
- private static final long serialVersionUID = -4451854497340313268L;
- String nodeId;
- int port;
-
- public WorkerSlot(String nodeId, Number port) {
- this.nodeId = nodeId;
- this.port = port.intValue();
- }
-
- public WorkerSlot() {
-
- }
-
- public String getNodeId() {
- return nodeId;
- }
-
- public int getPort() {
- return port;
- }
-
- public void setNodeId(String nodeId) {
- this.nodeId = nodeId;
- }
-
- public void setPort(int port) {
- this.port = port;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((nodeId == null) ? 0 : nodeId.hashCode());
- result = prime * result + port;
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- WorkerSlot other = (WorkerSlot) obj;
- if (nodeId == null) {
- if (other.nodeId != null)
- return false;
- } else if (!nodeId.equals(other.nodeId))
- return false;
- if (port != other.port)
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return this.nodeId + ":" + this.port;
- }
-
- @Override
- public int compareTo(WorkerSlot o) {
- String otherNode = o.getNodeId();
- if (nodeId == null) {
- if (otherNode != null) {
- return -1;
- } else {
- return port - o.getPort();
- }
- } else {
- int ret = nodeId.compareTo(otherNode);
- if (ret == 0) {
- return port - o.getPort();
- } else {
- return ret;
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/AuthUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/AuthUtils.java b/jstorm-client/src/main/java/backtype/storm/security/auth/AuthUtils.java
deleted file mode 100644
index 5306933..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/AuthUtils.java
+++ /dev/null
@@ -1,94 +0,0 @@
-package backtype.storm.security.auth;
-
-import backtype.storm.Config;
-import javax.security.auth.login.Configuration;
-import javax.security.auth.login.AppConfigurationEntry;
-import java.security.NoSuchAlgorithmException;
-import java.security.URIParameter;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import java.io.File;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.net.URI;
-import java.util.Map;
-
-public class AuthUtils {
- private static final Logger LOG = LoggerFactory.getLogger(AuthUtils.class);
- public static final String LOGIN_CONTEXT_SERVER = "StormServer";
- public static final String LOGIN_CONTEXT_CLIENT = "StormClient";
- public static final String SERVICE = "storm_thrift_server";
-
- /**
- * Construct a JAAS configuration object per storm configuration file
- *
- * @param storm_conf
- * Storm configuration
- * @return JAAS configuration object
- */
- public static Configuration GetConfiguration(Map storm_conf) {
- Configuration login_conf = null;
-
- // find login file configuration from Storm configuration
- String loginConfigurationFile = (String) storm_conf
- .get("java.security.auth.login.config");
- if ((loginConfigurationFile != null)
- && (loginConfigurationFile.length() > 0)) {
- try {
- URI config_uri = new File(loginConfigurationFile).toURI();
- login_conf = Configuration.getInstance("JavaLoginConfig",
- new URIParameter(config_uri));
- } catch (NoSuchAlgorithmException ex1) {
- if (ex1.getCause() instanceof FileNotFoundException)
- throw new RuntimeException("configuration file "
- + loginConfigurationFile + " could not be found");
- else
- throw new RuntimeException(ex1);
- } catch (Exception ex2) {
- throw new RuntimeException(ex2);
- }
- }
-
- return login_conf;
- }
-
- /**
- * Construct a transport plugin per storm configuration
- *
- * @param conf
- * storm configuration
- * @return
- */
- public static ITransportPlugin GetTransportPlugin(Map storm_conf,
- Configuration login_conf) {
- ITransportPlugin transportPlugin = null;
- try {
- String transport_plugin_klassName = (String) storm_conf
- .get(Config.STORM_THRIFT_TRANSPORT_PLUGIN);
- Class klass = Class.forName(transport_plugin_klassName);
- transportPlugin = (ITransportPlugin) klass.newInstance();
- transportPlugin.prepare(storm_conf, login_conf);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- return transportPlugin;
- }
-
- public static String get(Configuration configuration, String section,
- String key) throws IOException {
- AppConfigurationEntry configurationEntries[] = configuration
- .getAppConfigurationEntry(section);
- if (configurationEntries == null) {
- String errorMessage = "Could not find a '" + section
- + "' entry in this configuration.";
- throw new IOException(errorMessage);
- }
-
- for (AppConfigurationEntry entry : configurationEntries) {
- Object val = entry.getOptions().get(key);
- if (val != null)
- return (String) val;
- }
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/IAuthorizer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/IAuthorizer.java b/jstorm-client/src/main/java/backtype/storm/security/auth/IAuthorizer.java
deleted file mode 100644
index 90b17d0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/IAuthorizer.java
+++ /dev/null
@@ -1,38 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.util.Map;
-
-/**
- * Nimbus can be configured with an authorization plugin. If none is specified,
- * all requests are authorized.
- *
- * You can specify the authorization plugin via a storm command-line parameter. For example:
- * storm -c
- * nimbus.authorization.class=backtype.storm.security.auth.NoopAuthorizer ...
- *
- * You can also specify it via storm.yaml: nimbus.authorization.class:
- * backtype.storm.security.auth.NoopAuthorizer
- */
-public interface IAuthorizer {
- /**
- * Invoked once immediately after construction
- *
- * @param storm_conf
- * Storm configuration
- */
- void prepare(Map storm_conf);
-
- /**
- * permit() method is invoked for each incoming Thrift request.
- *
- * @param context
- * request context (includes info about the remote address, subject and request ID)
- * @param operation
- * operation name
- * @param topology_conf
- * configuration of the targeted topology
- * @return true if the request is authorized, false if rejected
- */
- public boolean permit(ReqContext context, String operation,
- Map topology_conf);
-}
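
A hedged sketch of a custom authorizer built on this interface (the class name and the
allowed operation names are illustrative assumptions): it permits read-style operations and
rejects everything else, unlike the NoopAuthorizer/DenyAuthorizer further below, which answer
unconditionally.

    package backtype.storm.security.auth.authorizer;

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    import backtype.storm.security.auth.IAuthorizer;
    import backtype.storm.security.auth.ReqContext;

    public class ReadOnlyAuthorizer implements IAuthorizer {
        // operation names are placeholders for whatever read-only calls should be allowed
        private static final Set<String> ALLOWED =
                new HashSet<String>(Arrays.asList("getTopology", "getClusterInfo"));

        public void prepare(Map storm_conf) {
        }

        public boolean permit(ReqContext context, String operation, Map topology_conf) {
            return ALLOWED.contains(operation);
        }
    }
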
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/ITransportPlugin.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/ITransportPlugin.java b/jstorm-client/src/main/java/backtype/storm/security/auth/ITransportPlugin.java
deleted file mode 100644
index 3e4a35a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/ITransportPlugin.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.io.IOException;
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.server.TServer;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-
-/**
- * Interface for Thrift Transport plugin
- */
-public interface ITransportPlugin {
- /**
- * Invoked once immediately after construction
- *
- * @param storm_conf
- * Storm configuration
- * @param login_conf
- * login configuration
- */
- void prepare(Map storm_conf, Configuration login_conf);
-
- /**
- * Create a server associated with a given port and service handler
- *
- * @param port
- * listening port
- * @param processor
- * service handler
- * @return the server to be bound
- */
- public TServer getServer(int port, TProcessor processor)
- throws IOException, TTransportException;
-
- /**
- * Connect to the specified server via framed transport
- *
- * @param transport
- * The underlying Thrift transport.
- * @param serverHost
- * server host
- */
- public TTransport connect(TTransport transport, String serverHost)
- throws IOException, TTransportException;
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/ReqContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/ReqContext.java b/jstorm-client/src/main/java/backtype/storm/security/auth/ReqContext.java
deleted file mode 100644
index e9afde6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/ReqContext.java
+++ /dev/null
@@ -1,91 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.net.InetAddress;
-import com.google.common.annotations.VisibleForTesting;
-import java.security.AccessControlContext;
-import java.security.AccessController;
-import java.security.Principal;
-import javax.security.auth.Subject;
-
-/**
- * Request context: includes info about (1) the remote address, (2) the remote
- * subject and primary principal, and (3) the request ID
- */
-public class ReqContext {
- private static final AtomicInteger uniqueId = new AtomicInteger(0);
- private Subject _subject;
- private InetAddress _remoteAddr;
- private Integer _reqID;
- private Map _storm_conf;
-
- /**
- * Get the request context associated with the current thread
- *
- * @return
- */
- public static ReqContext context() {
- return ctxt.get();
- }
-
- // each thread will have its own request context
- private static final ThreadLocal<ReqContext> ctxt = new ThreadLocal<ReqContext>() {
- @Override
- protected ReqContext initialValue() {
- return new ReqContext(AccessController.getContext());
- }
- };
-
- // private constructor
- @VisibleForTesting
- ReqContext(AccessControlContext acl_ctxt) {
- _subject = Subject.getSubject(acl_ctxt);
- _reqID = uniqueId.incrementAndGet();
- }
-
- /**
- * client address
- */
- public void setRemoteAddress(InetAddress addr) {
- _remoteAddr = addr;
- }
-
- public InetAddress remoteAddress() {
- return _remoteAddr;
- }
-
- /**
- * Set remote subject explicitly
- */
- public void setSubject(Subject subject) {
- _subject = subject;
- }
-
- /**
- * Retrieve client subject associated with this request context
- */
- public Subject subject() {
- return _subject;
- }
-
- /**
- * The primary principal associated with the current subject
- */
- public Principal principal() {
- if (_subject == null)
- return null;
- Set<Principal> princs = _subject.getPrincipals();
- if (princs.size() == 0)
- return null;
- return (Principal) (princs.toArray()[0]);
- }
-
- /**
- * request ID of this request
- */
- public Integer requestID() {
- return _reqID;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/SaslTransportPlugin.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/SaslTransportPlugin.java b/jstorm-client/src/main/java/backtype/storm/security/auth/SaslTransportPlugin.java
deleted file mode 100644
index 31a4fef..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/SaslTransportPlugin.java
+++ /dev/null
@@ -1,150 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.io.IOException;
-import java.net.Socket;
-import java.security.Principal;
-import java.util.Map;
-
-import javax.security.auth.Subject;
-import javax.security.auth.login.Configuration;
-import javax.security.sasl.SaslServer;
-
-import org.apache.thrift7.TException;
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.protocol.TProtocol;
-import org.apache.thrift7.server.TServer;
-import org.apache.thrift7.server.TThreadPoolServer;
-import org.apache.thrift7.transport.TSaslServerTransport;
-import org.apache.thrift7.transport.TServerSocket;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-import org.apache.thrift7.transport.TTransportFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Base class for SASL authentication plugin.
- */
-public abstract class SaslTransportPlugin implements ITransportPlugin {
- protected Configuration login_conf;
- private static final Logger LOG = LoggerFactory
- .getLogger(SaslTransportPlugin.class);
-
- /**
- * Invoked once immediately after construction
- *
- * @param storm_conf
- * Storm configuration
- * @param login_conf
- * login configuration
- */
- public void prepare(Map storm_conf, Configuration login_conf) {
- this.login_conf = login_conf;
- }
-
- public TServer getServer(int port, TProcessor processor)
- throws IOException, TTransportException {
- TTransportFactory serverTransportFactory = getServerTransportFactory();
-
- // define THsHaServer args
- // original: THsHaServer + TNonblockingServerSocket
- // option: TThreadPoolServer + TServerSocket
- TServerSocket serverTransport = new TServerSocket(port);
- TThreadPoolServer.Args server_args = new TThreadPoolServer.Args(
- serverTransport).processor(new TUGIWrapProcessor(processor))
- .minWorkerThreads(64).maxWorkerThreads(64)
- .protocolFactory(new TBinaryProtocol.Factory());
- if (serverTransportFactory != null)
- server_args.transportFactory(serverTransportFactory);
-
- // construct TThreadPoolServer
- return new TThreadPoolServer(server_args);
- }
-
- /**
- * All subclasses must implement this method
- *
- * @return
- * @throws IOException
- */
- protected abstract TTransportFactory getServerTransportFactory()
- throws IOException;
-
- /**
- * Processor that pulls the SaslServer object out of the transport, and
- * assumes the remote user's UGI before calling through to the original
- * processor.
- *
- * This is used on the server side to set the UGI for each specific call.
- */
- private class TUGIWrapProcessor implements TProcessor {
- final TProcessor wrapped;
-
- TUGIWrapProcessor(TProcessor wrapped) {
- this.wrapped = wrapped;
- }
-
- public boolean process(final TProtocol inProt, final TProtocol outProt)
- throws TException {
- // populating request context
- ReqContext req_context = ReqContext.context();
-
- TTransport trans = inProt.getTransport();
- // Sasl transport
- TSaslServerTransport saslTrans = (TSaslServerTransport) trans;
-
- // remote address
- TSocket tsocket = (TSocket) saslTrans.getUnderlyingTransport();
- Socket socket = tsocket.getSocket();
- req_context.setRemoteAddress(socket.getInetAddress());
-
- // remote subject
- SaslServer saslServer = saslTrans.getSaslServer();
- String authId = saslServer.getAuthorizationID();
- Subject remoteUser = new Subject();
- remoteUser.getPrincipals().add(new User(authId));
- req_context.setSubject(remoteUser);
-
- // invoke service handler
- return wrapped.process(inProt, outProt);
- }
- }
-
- public static class User implements Principal {
- private final String name;
-
- public User(String name) {
- this.name = name;
- }
-
- /**
- * Get the full name of the user.
- */
- public String getName() {
- return name;
- }
-
- @Override
- public boolean equals(Object o) {
- if (this == o) {
- return true;
- } else if (o == null || getClass() != o.getClass()) {
- return false;
- } else {
- return (name.equals(((User) o).name));
- }
- }
-
- @Override
- public int hashCode() {
- return name.hashCode();
- }
-
- @Override
- public String toString() {
- return name;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/SimpleTransportPlugin.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/SimpleTransportPlugin.java b/jstorm-client/src/main/java/backtype/storm/security/auth/SimpleTransportPlugin.java
deleted file mode 100644
index e118f7d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/SimpleTransportPlugin.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.io.IOException;
-import java.net.InetAddress;
-import java.net.Socket;
-import java.net.UnknownHostException;
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-
-import org.apache.thrift7.TException;
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.protocol.TProtocol;
-import org.apache.thrift7.server.THsHaServer;
-import org.apache.thrift7.server.TServer;
-import org.apache.thrift7.transport.TFramedTransport;
-import org.apache.thrift7.transport.TMemoryInputTransport;
-import org.apache.thrift7.transport.TNonblockingServerSocket;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Simple Thrift transport plugin.
- *
- * This plugin is designed to be backward compatible with existing Storm code.
- */
-public class SimpleTransportPlugin implements ITransportPlugin {
- protected Configuration login_conf;
- private static final Logger LOG = LoggerFactory
- .getLogger(SimpleTransportPlugin.class);
-
- /**
- * Invoked once immediately after construction
- *
- * @param storm_conf
- * Storm configuration
- * @param login_conf
- * login configuration
- */
- public void prepare(Map storm_conf, Configuration login_conf) {
- this.login_conf = login_conf;
- }
-
- /**
- * We let Thrift apply the default transport factory
- */
- public TServer getServer(int port, TProcessor processor)
- throws IOException, TTransportException {
- TNonblockingServerSocket serverTransport = new TNonblockingServerSocket(
- port);
- THsHaServer.Args server_args = new THsHaServer.Args(serverTransport)
- .processor(new SimpleWrapProcessor(processor))
- .workerThreads(64)
- .protocolFactory(new TBinaryProtocol.Factory());
-
- // construct THsHaServer
- return new THsHaServer(server_args);
- }
-
- /**
- * Connect to the specified server via framed transport
- *
- * @param transport
- * The underlying Thrift transport.
- */
- public TTransport connect(TTransport transport, String serverHost)
- throws TTransportException {
- // create a framed transport
- TTransport conn = new TFramedTransport(transport);
-
- // connect
- conn.open();
- LOG.debug("Simple client transport has been established");
-
- return conn;
- }
-
- /**
- * Processor that populates simple transport info into ReqContext, and then
- * invokes the service handler
- */
- private class SimpleWrapProcessor implements TProcessor {
- final TProcessor wrapped;
-
- SimpleWrapProcessor(TProcessor wrapped) {
- this.wrapped = wrapped;
- }
-
- public boolean process(final TProtocol inProt, final TProtocol outProt)
- throws TException {
- // populating request context
- ReqContext req_context = ReqContext.context();
-
- TTransport trans = inProt.getTransport();
- if (trans instanceof TMemoryInputTransport) {
- try {
- req_context.setRemoteAddress(InetAddress.getLocalHost());
- } catch (UnknownHostException e) {
- throw new RuntimeException(e);
- }
- } else if (trans instanceof TSocket) {
- TSocket tsocket = (TSocket) trans;
- // remote address
- Socket socket = tsocket.getSocket();
- req_context.setRemoteAddress(socket.getInetAddress());
- }
-
- // anonymous user
- req_context.setSubject(null);
-
- // invoke service handler
- return wrapped.process(inProt, outProt);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftClient.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftClient.java b/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftClient.java
deleted file mode 100644
index 5a78a70..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftClient.java
+++ /dev/null
@@ -1,138 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.io.IOException;
-import java.security.InvalidParameterException;
-import java.util.List;
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.protocol.TProtocol;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.Config;
-import backtype.storm.utils.Utils;
-
-public class ThriftClient {
- private static final Logger LOG = LoggerFactory
- .getLogger(ThriftClient.class);
- private static final String MASTER_PATH = "/nimbus_master";
- private TTransport _transport;
- protected TProtocol _protocol;
- private String masterHost;
-
- private Map<Object, Object> conf;
-
- public ThriftClient(Map storm_conf) throws Exception {
- this(storm_conf, null);
- }
-
- @SuppressWarnings("unchecked")
- public ThriftClient(Map storm_conf, Integer timeout) throws Exception {
- conf = storm_conf;
- masterHost = getMaster(storm_conf, timeout);
- connect(storm_conf, timeout);
- }
-
- public static String getMaster(Map storm_conf, Integer timeout) throws Exception {
- CuratorFramework zkobj = null;
- String masterHost = null;
-
- try {
- String root = String.valueOf(storm_conf
- .get(Config.STORM_ZOOKEEPER_ROOT));
- String zkMasterDir = root + MASTER_PATH;
-
-
- zkobj = Utils.newCurator(storm_conf,
- (List<String>) storm_conf.get(Config.STORM_ZOOKEEPER_SERVERS),
- storm_conf.get(Config.STORM_ZOOKEEPER_PORT), zkMasterDir);
- zkobj.start();
- if (zkobj.checkExists().forPath("/") == null) {
- throw new RuntimeException("No alive nimbus ");
- }
-
- masterHost = new String(zkobj.getData().forPath("/"));
-
- LOG.info("masterHost:" + masterHost);
- return masterHost;
- }finally {
- if (zkobj != null) {
- zkobj.close();
- zkobj = null;
- }
- }
- }
-
- public TTransport transport() {
- return _transport;
- }
-
- protected void connect(Map storm_conf, Integer timeout)
- throws Exception {
- try {
- String[] host_port = masterHost.split(":");
- if (host_port.length != 2) {
- throw new InvalidParameterException("Host format error: "
- + masterHost);
- }
- String host = host_port[0];
- int port = Integer.parseInt(host_port[1]);
- LOG.info("Begin to connect " + host + ":" + port);
-
- // locate login configuration
- Configuration login_conf = AuthUtils.GetConfiguration(storm_conf);
-
- // construct a transport plugin
- ITransportPlugin transportPlugin = AuthUtils.GetTransportPlugin(
- storm_conf, login_conf);
-
- // create a socket with server
- if (host == null) {
- throw new IllegalArgumentException("host is not set");
- }
- if (port <= 0) {
- throw new IllegalArgumentException("invalid port: " + port);
- }
-
- TSocket socket = new TSocket(host, port);
- if (timeout != null) {
- socket.setTimeout(timeout);
- }
- final TTransport underlyingTransport = socket;
-
- // establish client-server transport via plugin
- _transport = transportPlugin.connect(underlyingTransport, host);
- } catch (IOException ex) {
- throw new RuntimeException("Create transport error");
- }
- _protocol = null;
- if (_transport != null)
- _protocol = new TBinaryProtocol(_transport);
- }
-
-
-
-
- public void close() {
- if (_transport != null)
- _transport.close();
- }
-
- public String getMasterHost() {
- return masterHost;
- }
-
- public Map<Object, Object> getConf() {
- return conf;
- }
-
- protected void flush() {
-
- }
-}
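
A brief usage sketch of the client above, assuming a running cluster whose master host is
registered under /nimbus_master in ZooKeeper; Utils.readStormConfig() is assumed to be the
usual helper for loading storm.yaml plus defaults, and the 30-second timeout is an arbitrary
example value:

    import java.util.Map;

    import backtype.storm.security.auth.ThriftClient;
    import backtype.storm.utils.Utils;

    public class ThriftClientSketch {
        public static void main(String[] args) throws Exception {
            Map conf = Utils.readStormConfig();                  // assumed config-loading helper
            String master = ThriftClient.getMaster(conf, null);  // "<host>:<port>" read from ZK
            System.out.println("nimbus master is " + master);

            ThriftClient client = new ThriftClient(conf, 30000); // 30s socket timeout
            try {
                // subclasses would use the protected _protocol to drive a generated Thrift client
                System.out.println("connected to " + client.getMasterHost());
            } finally {
                client.close();
            }
        }
    }
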
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftServer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftServer.java b/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftServer.java
deleted file mode 100644
index d1b5759..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/ThriftServer.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package backtype.storm.security.auth;
-
-import java.util.Map;
-
-import javax.security.auth.login.Configuration;
-
-import org.apache.thrift7.TProcessor;
-import org.apache.thrift7.server.TServer;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ThriftServer {
- private static final Logger LOG = LoggerFactory
- .getLogger(ThriftServer.class);
- private Map _storm_conf; // storm configuration
- protected TProcessor _processor = null;
- private int _port = 0;
- private TServer _server = null;
- private Configuration _login_conf;
-
- public ThriftServer(Map storm_conf, TProcessor processor, int port) {
- try {
- _storm_conf = storm_conf;
- _processor = processor;
- _port = port;
-
- // retrieve authentication configuration
- _login_conf = AuthUtils.GetConfiguration(_storm_conf);
- } catch (Exception x) {
- LOG.error(x.getMessage(), x);
- }
- }
-
- public void stop() {
- if (_server != null)
- _server.stop();
- }
-
- /**
- * Is the ThriftServer listening for requests?
- *
- * @return
- */
- public boolean isServing() {
- if (_server == null)
- return false;
- return _server.isServing();
- }
-
- public void serve() {
- try {
- // locate our thrift transport plugin
- ITransportPlugin transportPlugin = AuthUtils.GetTransportPlugin(
- _storm_conf, _login_conf);
-
- // server
- _server = transportPlugin.getServer(_port, _processor);
-
- // start accepting requests
- _server.serve();
- } catch (Exception ex) {
- LOG.error("ThriftServer is being stopped due to: " + ex, ex);
- if (_server != null)
- _server.stop();
- Runtime.getRuntime().halt(1); // shutdown server process since we
- // could not handle Thrift requests
- // any more
- }
- }
-}
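
For the server side, a sketch of how this class is wired up; the processor argument would be
a generated Thrift service handler (e.g. Nimbus's), which is outside this patch, and the port
value is left to the caller:

    import java.util.Map;

    import org.apache.thrift7.TProcessor;

    import backtype.storm.security.auth.ThriftServer;
    import backtype.storm.utils.Utils;

    public class ThriftServerSketch {
        // `processor` is left to the caller: it would be the generated service handler.
        public static void start(TProcessor processor, int port) {
            Map conf = Utils.readStormConfig();            // assumed config-loading helper
            ThriftServer server = new ThriftServer(conf, processor, port);
            server.serve();                                // blocks; halts the JVM on fatal errors
        }
    }
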
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/DenyAuthorizer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/DenyAuthorizer.java b/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/DenyAuthorizer.java
deleted file mode 100644
index 9414eaf..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/DenyAuthorizer.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package backtype.storm.security.auth.authorizer;
-
-import java.util.Map;
-
-import backtype.storm.Config;
-import backtype.storm.security.auth.IAuthorizer;
-import backtype.storm.security.auth.ReqContext;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * An authorization implementation that denies everything, for testing purposes
- */
-public class DenyAuthorizer implements IAuthorizer {
- private static final Logger LOG = LoggerFactory
- .getLogger(DenyAuthorizer.class);
-
- /**
- * Invoked once immediately after construction
- *
- * @param conf
- * Storm configuration
- */
- public void prepare(Map conf) {
- }
-
- /**
- * permit() method is invoked for each incoming Thrift request
- *
- * @param context
- * request context
- * @param operation
- * operation name
- * @param topology_conf
- * configuration of the targeted topology
- * @return true if the request is authorized, false if rejected
- */
- public boolean permit(ReqContext context, String operation,
- Map topology_conf) {
- LOG.info("[req "
- + context.requestID()
- + "] Access "
- + " from: "
- + (context.remoteAddress() == null ? "null" : context
- .remoteAddress().toString()) + " principal:"
- + (context.principal() == null ? "null" : context.principal())
- + " op:" + operation + " topoology:"
- + topology_conf.get(Config.TOPOLOGY_NAME));
- return false;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/NoopAuthorizer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/NoopAuthorizer.java b/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/NoopAuthorizer.java
deleted file mode 100644
index b614835..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/authorizer/NoopAuthorizer.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package backtype.storm.security.auth.authorizer;
-
-import java.util.Map;
-
-import backtype.storm.Config;
-import backtype.storm.security.auth.IAuthorizer;
-import backtype.storm.security.auth.ReqContext;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * A no-op authorization implementation that illustrates the info available for
- * authorization decisions.
- */
-public class NoopAuthorizer implements IAuthorizer {
- private static final Logger LOG = LoggerFactory
- .getLogger(NoopAuthorizer.class);
-
- /**
- * Invoked once immediately after construction
- *
- * @param conf
- * Storm configuration
- */
- public void prepare(Map conf) {
- }
-
- /**
- * permit() method is invoked for each incoming Thrift request
- *
- * @param context
- * request context (includes info about the remote address, subject and request ID)
- * @param operation
- * operation name
- * @param topology_conf
- * configuration of the targeted topology
- * @return true if the request is authorized, false if rejected
- */
- public boolean permit(ReqContext context, String operation,
- Map topology_conf) {
- LOG.info("[req "
- + context.requestID()
- + "] Access "
- + " from: "
- + (context.remoteAddress() == null ? "null" : context
- .remoteAddress().toString()) + " principal:"
- + (context.principal() == null ? "null" : context.principal())
- + " op:" + operation + " topoology:"
- + topology_conf.get(Config.TOPOLOGY_NAME));
- return true;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ClientCallbackHandler.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ClientCallbackHandler.java b/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ClientCallbackHandler.java
deleted file mode 100644
index c15ff7d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ClientCallbackHandler.java
+++ /dev/null
@@ -1,101 +0,0 @@
-package backtype.storm.security.auth.digest;
-
-import java.io.IOException;
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.callback.NameCallback;
-import javax.security.auth.callback.PasswordCallback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.RealmCallback;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.security.auth.AuthUtils;
-
-/**
- * Client-side SASL callback handler.
- */
-public class ClientCallbackHandler implements CallbackHandler {
- private static final String USERNAME = "username";
- private static final String PASSWORD = "password";
- private static final Logger LOG = LoggerFactory
- .getLogger(ClientCallbackHandler.class);
- private String _username = null;
- private String _password = null;
-
- /**
- * Constructor based on a JAAS configuration
- *
- * For digest authentication, a username and password pair should be defined.
- *
- * @param configuration
- * @throws IOException
- */
- public ClientCallbackHandler(Configuration configuration)
- throws IOException {
- if (configuration == null)
- return;
- AppConfigurationEntry configurationEntries[] = configuration
- .getAppConfigurationEntry(AuthUtils.LOGIN_CONTEXT_CLIENT);
- if (configurationEntries == null) {
- String errorMessage = "Could not find a '"
- + AuthUtils.LOGIN_CONTEXT_CLIENT
- + "' entry in this configuration: Client cannot start.";
- throw new IOException(errorMessage);
- }
-
- _password = "";
- for (AppConfigurationEntry entry : configurationEntries) {
- if (entry.getOptions().get(USERNAME) != null) {
- _username = (String) entry.getOptions().get(USERNAME);
- }
- if (entry.getOptions().get(PASSWORD) != null) {
- _password = (String) entry.getOptions().get(PASSWORD);
- }
- }
- }
-
- /**
- * This method is invoked by SASL for authentication challenges
- *
- * @param callbacks
- * a collection of challenge callbacks
- */
- public void handle(Callback[] callbacks) throws IOException,
- UnsupportedCallbackException {
- for (Callback c : callbacks) {
- if (c instanceof NameCallback) {
- LOG.debug("name callback");
- NameCallback nc = (NameCallback) c;
- nc.setName(_username);
- } else if (c instanceof PasswordCallback) {
- LOG.debug("password callback");
- PasswordCallback pc = (PasswordCallback) c;
- if (_password != null) {
- pc.setPassword(_password.toCharArray());
- }
- } else if (c instanceof AuthorizeCallback) {
- LOG.debug("authorization callback");
- AuthorizeCallback ac = (AuthorizeCallback) c;
- String authid = ac.getAuthenticationID();
- String authzid = ac.getAuthorizationID();
- if (authid.equals(authzid)) {
- ac.setAuthorized(true);
- } else {
- ac.setAuthorized(false);
- }
- if (ac.isAuthorized()) {
- ac.setAuthorizedID(authzid);
- }
- } else if (c instanceof RealmCallback) {
- RealmCallback rc = (RealmCallback) c;
- ((RealmCallback) c).setText(rc.getDefaultText());
- } else {
- throw new UnsupportedCallbackException(c);
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/digest/DigestSaslTransportPlugin.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/digest/DigestSaslTransportPlugin.java b/jstorm-client/src/main/java/backtype/storm/security/auth/digest/DigestSaslTransportPlugin.java
deleted file mode 100644
index 05f19c5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/digest/DigestSaslTransportPlugin.java
+++ /dev/null
@@ -1,52 +0,0 @@
-package backtype.storm.security.auth.digest;
-
-import java.io.IOException;
-
-import javax.security.auth.callback.CallbackHandler;
-
-import org.apache.thrift7.transport.TSaslClientTransport;
-import org.apache.thrift7.transport.TSaslServerTransport;
-import org.apache.thrift7.transport.TTransport;
-import org.apache.thrift7.transport.TTransportException;
-import org.apache.thrift7.transport.TTransportFactory;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.security.auth.AuthUtils;
-import backtype.storm.security.auth.SaslTransportPlugin;
-
-public class DigestSaslTransportPlugin extends SaslTransportPlugin {
- public static final String DIGEST = "DIGEST-MD5";
- private static final Logger LOG = LoggerFactory
- .getLogger(DigestSaslTransportPlugin.class);
-
- protected TTransportFactory getServerTransportFactory() throws IOException {
- // create an authentication callback handler
- CallbackHandler server_callback_handler = new ServerCallbackHandler(
- login_conf);
-
- // create a transport factory that will invoke our auth callback for
- // digest
- TSaslServerTransport.Factory factory = new TSaslServerTransport.Factory();
- factory.addServerDefinition(DIGEST, AuthUtils.SERVICE, "localhost",
- null, server_callback_handler);
-
- LOG.info("SASL DIGEST-MD5 transport factory will be used");
- return factory;
- }
-
- public TTransport connect(TTransport transport, String serverHost)
- throws TTransportException, IOException {
- ClientCallbackHandler client_callback_handler = new ClientCallbackHandler(
- login_conf);
- TSaslClientTransport wrapper_transport = new TSaslClientTransport(
- DIGEST, null, AuthUtils.SERVICE, serverHost, null,
- client_callback_handler, transport);
-
- wrapper_transport.open();
- LOG.debug("SASL DIGEST-MD5 client transport has been established");
-
- return wrapper_transport;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ServerCallbackHandler.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ServerCallbackHandler.java b/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ServerCallbackHandler.java
deleted file mode 100644
index 09662e5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/auth/digest/ServerCallbackHandler.java
+++ /dev/null
@@ -1,109 +0,0 @@
-package backtype.storm.security.auth.digest;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import javax.security.auth.callback.Callback;
-import javax.security.auth.callback.CallbackHandler;
-import javax.security.auth.callback.NameCallback;
-import javax.security.auth.callback.PasswordCallback;
-import javax.security.auth.callback.UnsupportedCallbackException;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.Configuration;
-import javax.security.sasl.AuthorizeCallback;
-import javax.security.sasl.RealmCallback;
-
-import backtype.storm.security.auth.AuthUtils;
-
-/**
- * SASL server-side callback handler
- */
-public class ServerCallbackHandler implements CallbackHandler {
- private static final String USER_PREFIX = "user_";
- private static final Logger LOG = LoggerFactory
- .getLogger(ServerCallbackHandler.class);
- private static final String SYSPROP_SUPER_PASSWORD = "storm.SASLAuthenticationProvider.superPassword";
-
- private String userName;
- private final Map<String, String> credentials = new HashMap<String, String>();
-
- public ServerCallbackHandler(Configuration configuration)
- throws IOException {
- if (configuration == null)
- return;
-
- AppConfigurationEntry configurationEntries[] = configuration
- .getAppConfigurationEntry(AuthUtils.LOGIN_CONTEXT_SERVER);
- if (configurationEntries == null) {
- String errorMessage = "Could not find a '"
- + AuthUtils.LOGIN_CONTEXT_SERVER
- + "' entry in this configuration: Server cannot start.";
- throw new IOException(errorMessage);
- }
- credentials.clear();
- for (AppConfigurationEntry entry : configurationEntries) {
- Map<String, ?> options = entry.getOptions();
- // Populate DIGEST-MD5 user -> password map with JAAS configuration
- // entries from the "Server" section.
- // Usernames are distinguished from other options by prefixing the
- // username with a "user_" prefix.
- for (Map.Entry<String, ?> pair : options.entrySet()) {
- String key = pair.getKey();
- if (key.startsWith(USER_PREFIX)) {
- String userName = key.substring(USER_PREFIX.length());
- credentials.put(userName, (String) pair.getValue());
- }
- }
- }
- }
-
- public void handle(Callback[] callbacks)
- throws UnsupportedCallbackException {
- for (Callback callback : callbacks) {
- if (callback instanceof NameCallback) {
- handleNameCallback((NameCallback) callback);
- } else if (callback instanceof PasswordCallback) {
- handlePasswordCallback((PasswordCallback) callback);
- } else if (callback instanceof RealmCallback) {
- handleRealmCallback((RealmCallback) callback);
- } else if (callback instanceof AuthorizeCallback) {
- handleAuthorizeCallback((AuthorizeCallback) callback);
- }
- }
- }
-
- private void handleNameCallback(NameCallback nc) {
- LOG.debug("handleNameCallback");
- userName = nc.getDefaultName();
- nc.setName(nc.getDefaultName());
- }
-
- private void handlePasswordCallback(PasswordCallback pc) {
- LOG.debug("handlePasswordCallback");
- if ("super".equals(this.userName)
- && System.getProperty(SYSPROP_SUPER_PASSWORD) != null) {
- // superuser: use Java system property for password, if available.
- pc.setPassword(System.getProperty(SYSPROP_SUPER_PASSWORD)
- .toCharArray());
- } else if (credentials.containsKey(userName)) {
- pc.setPassword(credentials.get(userName).toCharArray());
- } else {
- LOG.warn("No password found for user: " + userName);
- }
- }
-
- private void handleRealmCallback(RealmCallback rc) {
- LOG.debug("handleRealmCallback: " + rc.getDefaultText());
- rc.setText(rc.getDefaultText());
- }
-
- private void handleAuthorizeCallback(AuthorizeCallback ac) {
- String authenticationID = ac.getAuthenticationID();
- LOG.debug("Successfully authenticated client: authenticationID="
- + authenticationID);
- ac.setAuthorizedID(authenticationID);
- ac.setAuthorized(true);
- }
-}
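For context, a minimal sketch of the "user_" prefix convention handled above (illustrative only: the login module class name and the credentials are placeholders, and a real deployment would normally put the same options in a JAAS login file rather than an in-line Configuration):

    import java.util.HashMap;
    import java.util.Map;
    import javax.security.auth.login.AppConfigurationEntry;
    import javax.security.auth.login.Configuration;

    // Illustrative in-memory JAAS Configuration: every option whose key starts
    // with "user_" becomes a DIGEST-MD5 credential in ServerCallbackHandler,
    // e.g. "user_bob" -> credentials.put("bob", "bob_password").
    public class InMemoryServerConfiguration extends Configuration {
        @Override
        public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
            Map<String, String> options = new HashMap<String, String>();
            options.put("user_bob", "bob_password");      // placeholder credentials
            options.put("user_alice", "alice_password");
            return new AppConfigurationEntry[] {
                new AppConfigurationEntry(
                    "org.apache.zookeeper.server.auth.DigestLoginModule", // assumed login module
                    AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
                    options)
            };
        }
    }

The handler only reads the section named by AuthUtils.LOGIN_CONTEXT_SERVER, so in practice these options live under that section of the JAAS configuration.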
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/security/serialization/BlowfishTupleSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/security/serialization/BlowfishTupleSerializer.java b/jstorm-client/src/main/java/backtype/storm/security/serialization/BlowfishTupleSerializer.java
deleted file mode 100644
index 6ded0d9..0000000
--- a/jstorm-client/src/main/java/backtype/storm/security/serialization/BlowfishTupleSerializer.java
+++ /dev/null
@@ -1,80 +0,0 @@
-package backtype.storm.security.serialization;
-
-import java.util.Map;
-import org.apache.commons.codec.binary.Hex;
-import org.apache.log4j.Logger;
-
-import javax.crypto.KeyGenerator;
-import javax.crypto.SecretKey;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-import com.esotericsoftware.kryo.Serializer;
-import com.esotericsoftware.kryo.serializers.BlowfishSerializer;
-
-import backtype.storm.serialization.types.ListDelegateSerializer;
-import backtype.storm.utils.ListDelegate;
-import backtype.storm.Config;
-
-/**
- * Apply Blowfish encryption to tuple communication with bolts
- */
-public class BlowfishTupleSerializer extends Serializer<ListDelegate> {
- /**
- * The secret key (if any) for data encryption by blowfish payload
- * serialization factory (BlowfishSerializationFactory). You should set it
- * via
- * "storm -c topology.tuple.serializer.blowfish.key=YOURKEY -c topology.tuple.serializer=backtype.storm.security.serialization.BlowfishTupleSerializer jar ..."
- * .
- */
- public static String SECRET_KEY = "topology.tuple.serializer.blowfish.key";
- private static final Logger LOG = Logger
- .getLogger(BlowfishTupleSerializer.class);
- private BlowfishSerializer _serializer;
-
- public BlowfishTupleSerializer(Kryo kryo, Map storm_conf) {
- String encryption_key = null;
- try {
- encryption_key = (String) storm_conf.get(SECRET_KEY);
- LOG.debug("Blowfish serializer being constructed ...");
- if (encryption_key == null) {
- throw new RuntimeException(
- "Blowfish encryption key not specified");
- }
- byte[] bytes = Hex.decodeHex(encryption_key.toCharArray());
- _serializer = new BlowfishSerializer(new ListDelegateSerializer(),
- bytes);
- } catch (org.apache.commons.codec.DecoderException ex) {
- throw new RuntimeException("Blowfish encryption key invalid", ex);
- }
- }
-
- @Override
- public void write(Kryo kryo, Output output, ListDelegate object) {
- _serializer.write(kryo, output, object);
- }
-
- @Override
- public ListDelegate read(Kryo kryo, Input input, Class<ListDelegate> type) {
- return (ListDelegate) _serializer.read(kryo, input, type);
- }
-
- /**
- * Produce a Blowfish key to be used with the "storm jar" command
- */
- public static void main(String[] args) {
- try {
- KeyGenerator kgen = KeyGenerator.getInstance("Blowfish");
- SecretKey skey = kgen.generateKey();
- byte[] raw = skey.getEncoded();
- String keyString = new String(Hex.encodeHex(raw));
- System.out.println("storm -c " + SECRET_KEY + "=" + keyString
- + " -c " + Config.TOPOLOGY_TUPLE_SERIALIZER + "="
- + BlowfishTupleSerializer.class.getName() + " ...");
- } catch (Exception ex) {
- LOG.error(ex.getMessage());
- ex.printStackTrace();
- }
- }
-}
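A minimal sketch of wiring this serializer into a topology configuration (the hex key below is a placeholder; BlowfishTupleSerializer.main() above prints a freshly generated key):

    import backtype.storm.Config;
    import backtype.storm.security.serialization.BlowfishTupleSerializer;

    public class BlowfishConfigSketch {
        public static Config blowfishConf() {
            Config conf = new Config();
            // Placeholder key: generate a real one with BlowfishTupleSerializer.main(),
            // which prints a hex-encoded Blowfish key.
            conf.put(BlowfishTupleSerializer.SECRET_KEY, "0123456789abcdef");
            conf.put(Config.TOPOLOGY_TUPLE_SERIALIZER,
                     BlowfishTupleSerializer.class.getName());
            return conf;
        }
    }

This mirrors the two -c flags printed by main().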
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/DefaultKryoFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/DefaultKryoFactory.java b/jstorm-client/src/main/java/backtype/storm/serialization/DefaultKryoFactory.java
deleted file mode 100644
index c9afec4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/DefaultKryoFactory.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.Config;
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.Serializer;
-import java.util.Map;
-
-public class DefaultKryoFactory implements IKryoFactory {
-
- public static class KryoSerializableDefault extends Kryo {
- boolean _override = false;
-
- public void overrideDefault(boolean value) {
- _override = value;
- }
-
- @Override
- public Serializer getDefaultSerializer(Class type) {
- if (_override) {
- return new SerializableSerializer();
- } else {
- return super.getDefaultSerializer(type);
- }
- }
- }
-
- @Override
- public Kryo getKryo(Map conf) {
- KryoSerializableDefault k = new KryoSerializableDefault();
- k.setRegistrationRequired(!((Boolean) conf
- .get(Config.TOPOLOGY_FALL_BACK_ON_JAVA_SERIALIZATION)));
- k.setReferences(false);
- return k;
- }
-
- @Override
- public void preRegister(Kryo k, Map conf) {
- }
-
- public void postRegister(Kryo k, Map conf) {
- ((KryoSerializableDefault) k).overrideDefault(true);
- }
-
- @Override
- public void postDecorate(Kryo k, Map conf) {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/IKryoDecorator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/IKryoDecorator.java b/jstorm-client/src/main/java/backtype/storm/serialization/IKryoDecorator.java
deleted file mode 100644
index f2f07ad..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/IKryoDecorator.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.serialization;
-
-import com.esotericsoftware.kryo.Kryo;
-
-public interface IKryoDecorator {
- void decorate(Kryo k);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/IKryoFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/IKryoFactory.java b/jstorm-client/src/main/java/backtype/storm/serialization/IKryoFactory.java
deleted file mode 100644
index 5cb9ab3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/IKryoFactory.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package backtype.storm.serialization;
-
-import com.esotericsoftware.kryo.Kryo;
-import java.util.Map;
-
-/**
- * An interface that controls the Kryo instance used by Storm for serialization.
- * The lifecycle is:
- *
- * 1. The Kryo instance is constructed using getKryo
- * 2. Storm registers the default classes (e.g. arrays, lists, maps, etc.)
- * 3. Storm calls preRegister hook
- * 4. Storm registers all user-defined registrations through topology.kryo.register
- * 5. Storm calls postRegister hook
- * 6. Storm calls all user-defined decorators through topology.kryo.decorators
- * 7. Storm calls postDecorate hook
- */
-public interface IKryoFactory {
- Kryo getKryo(Map conf);
-
- void preRegister(Kryo k, Map conf);
-
- void postRegister(Kryo k, Map conf);
-
- void postDecorate(Kryo k, Map conf);
-}
\ No newline at end of file
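A do-nothing IKryoFactory sketch showing where the three hooks fall in the lifecycle described above (illustrative only; DefaultKryoFactory above is the factory actually used):

    import java.util.Map;

    import com.esotericsoftware.kryo.Kryo;
    import backtype.storm.serialization.IKryoFactory;

    public class PlainKryoFactory implements IKryoFactory {
        @Override
        public Kryo getKryo(Map conf) {            // step 1: construct the instance
            Kryo kryo = new Kryo();
            kryo.setReferences(false);             // same choice DefaultKryoFactory makes
            return kryo;
        }

        @Override
        public void preRegister(Kryo k, Map conf) {
            // step 3: runs after Storm registers its default classes (step 2)
        }

        @Override
        public void postRegister(Kryo k, Map conf) {
            // step 5: runs after user registrations from topology.kryo.register (step 4)
        }

        @Override
        public void postDecorate(Kryo k, Map conf) {
            // step 7: runs after decorators from topology.kryo.decorators (step 6)
        }
    }

A topology would typically select such a factory through the topology.kryo.factory setting.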
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/ITupleDeserializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/ITupleDeserializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/ITupleDeserializer.java
deleted file mode 100644
index 1ae2c9f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/ITupleDeserializer.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.tuple.Tuple;
-
-import java.io.IOException;
-
-public interface ITupleDeserializer {
- Tuple deserialize(byte[] ser);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/serialization/ITupleSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/serialization/ITupleSerializer.java b/jstorm-client/src/main/java/backtype/storm/serialization/ITupleSerializer.java
deleted file mode 100644
index 9ad7cae..0000000
--- a/jstorm-client/src/main/java/backtype/storm/serialization/ITupleSerializer.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package backtype.storm.serialization;
-
-import backtype.storm.tuple.Tuple;
-
-public interface ITupleSerializer {
- /**
-  * Serialize the tuple. When the target task id is encoded, it should be
-  * written before the tuple body and stored in 4 bytes.
-  *
-  * @param tuple the tuple to serialize
-  * @return the serialized bytes
-  */
- byte[] serialize(Tuple tuple);
- // long crc32(Tuple tuple);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java b/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java
new file mode 100755
index 0000000..ebbbc6d
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/LinearDRPCTopologyBuilder.java
@@ -0,0 +1,394 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import backtype.storm.Constants;
+import backtype.storm.ILocalDRPC;
+import backtype.storm.coordination.BatchBoltExecutor;
+import backtype.storm.coordination.CoordinatedBolt;
+import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
+import backtype.storm.coordination.CoordinatedBolt.IdStreamSpec;
+import backtype.storm.coordination.CoordinatedBolt.SourceArgs;
+import backtype.storm.coordination.IBatchBolt;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.generated.StreamInfo;
+import backtype.storm.grouping.CustomStreamGrouping;
+import backtype.storm.grouping.PartialKeyGrouping;
+import backtype.storm.topology.BaseConfigurationDeclarer;
+import backtype.storm.topology.BasicBoltExecutor;
+import backtype.storm.topology.BoltDeclarer;
+import backtype.storm.topology.IBasicBolt;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.InputDeclarer;
+import backtype.storm.topology.OutputFieldsGetter;
+import backtype.storm.topology.TopologyBuilder;
+import backtype.storm.tuple.Fields;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+
+// Trident subsumes the functionality provided by this class, so it's deprecated
+@Deprecated
+public class LinearDRPCTopologyBuilder {
+ String _function;
+ List<Component> _components = new ArrayList<Component>();
+
+
+ public LinearDRPCTopologyBuilder(String function) {
+ _function = function;
+ }
+
+ public LinearDRPCInputDeclarer addBolt(IBatchBolt bolt, Number parallelism) {
+ return addBolt(new BatchBoltExecutor(bolt), parallelism);
+ }
+
+ public LinearDRPCInputDeclarer addBolt(IBatchBolt bolt) {
+ return addBolt(bolt, 1);
+ }
+
+ @Deprecated
+ public LinearDRPCInputDeclarer addBolt(IRichBolt bolt, Number parallelism) {
+ if(parallelism==null) parallelism = 1;
+ Component component = new Component(bolt, parallelism.intValue());
+ _components.add(component);
+ return new InputDeclarerImpl(component);
+ }
+
+ @Deprecated
+ public LinearDRPCInputDeclarer addBolt(IRichBolt bolt) {
+ return addBolt(bolt, null);
+ }
+
+ public LinearDRPCInputDeclarer addBolt(IBasicBolt bolt, Number parallelism) {
+ return addBolt(new BasicBoltExecutor(bolt), parallelism);
+ }
+
+ public LinearDRPCInputDeclarer addBolt(IBasicBolt bolt) {
+ return addBolt(bolt, null);
+ }
+
+ public StormTopology createLocalTopology(ILocalDRPC drpc) {
+ return createTopology(new DRPCSpout(_function, drpc));
+ }
+
+ public StormTopology createRemoteTopology() {
+ return createTopology(new DRPCSpout(_function));
+ }
+
+
+ private StormTopology createTopology(DRPCSpout spout) {
+ final String SPOUT_ID = "spout";
+ final String PREPARE_ID = "prepare-request";
+
+ TopologyBuilder builder = new TopologyBuilder();
+ builder.setSpout(SPOUT_ID, spout);
+ builder.setBolt(PREPARE_ID, new PrepareRequest())
+ .noneGrouping(SPOUT_ID);
+ int i=0;
+ for(; i<_components.size();i++) {
+ Component component = _components.get(i);
+
+ Map<String, SourceArgs> source = new HashMap<String, SourceArgs>();
+ if (i==1) {
+ source.put(boltId(i-1), SourceArgs.single());
+ } else if (i>=2) {
+ source.put(boltId(i-1), SourceArgs.all());
+ }
+ IdStreamSpec idSpec = null;
+ if(i==_components.size()-1 && component.bolt instanceof FinishedCallback) {
+ idSpec = IdStreamSpec.makeDetectSpec(PREPARE_ID, PrepareRequest.ID_STREAM);
+ }
+ BoltDeclarer declarer = builder.setBolt(
+ boltId(i),
+ new CoordinatedBolt(component.bolt, source, idSpec),
+ component.parallelism);
+
+ for(Map conf: component.componentConfs) {
+ declarer.addConfigurations(conf);
+ }
+
+ if(idSpec!=null) {
+ declarer.fieldsGrouping(idSpec.getGlobalStreamId().get_componentId(), PrepareRequest.ID_STREAM, new Fields("request"));
+ }
+ if(i==0 && component.declarations.isEmpty()) {
+ declarer.noneGrouping(PREPARE_ID, PrepareRequest.ARGS_STREAM);
+ } else {
+ String prevId;
+ if(i==0) {
+ prevId = PREPARE_ID;
+ } else {
+ prevId = boltId(i-1);
+ }
+ for(InputDeclaration declaration: component.declarations) {
+ declaration.declare(prevId, declarer);
+ }
+ }
+ if(i>0) {
+ declarer.directGrouping(boltId(i-1), Constants.COORDINATED_STREAM_ID);
+ }
+ }
+
+ IRichBolt lastBolt = _components.get(_components.size()-1).bolt;
+ OutputFieldsGetter getter = new OutputFieldsGetter();
+ lastBolt.declareOutputFields(getter);
+ Map<String, StreamInfo> streams = getter.getFieldsDeclaration();
+ if(streams.size()!=1) {
+ throw new RuntimeException("Must declare exactly one stream from last bolt in LinearDRPCTopology");
+ }
+ String outputStream = streams.keySet().iterator().next();
+ List<String> fields = streams.get(outputStream).get_output_fields();
+ if(fields.size()!=2) {
+ throw new RuntimeException("Output stream of last component in LinearDRPCTopology must contain exactly two fields. The first should be the request id, and the second should be the result.");
+ }
+
+ builder.setBolt("JoinResult", new JoinResult(PREPARE_ID))
+ .fieldsGrouping(boltId(i-1), outputStream, new Fields(fields.get(0)))
+ .fieldsGrouping(PREPARE_ID, PrepareRequest.RETURN_STREAM, new Fields("request"));
+ i++;
+ builder.setBolt("ReturnResults", new ReturnResults())
+ .noneGrouping("JoinResult");
+ return builder.createTopology();
+ }
+
+ private static String boltId(int index) {
+ return "bolt" + index;
+ }
+
+ private static class Component {
+ public IRichBolt bolt;
+ public int parallelism;
+ public List<Map> componentConfs;
+ public List<InputDeclaration> declarations = new ArrayList<InputDeclaration>();
+
+ public Component(IRichBolt bolt, int parallelism) {
+ this.bolt = bolt;
+ this.parallelism = parallelism;
+ this.componentConfs = new ArrayList();
+ }
+ }
+
+ private static interface InputDeclaration {
+ public void declare(String prevComponent, InputDeclarer declarer);
+ }
+
+ private class InputDeclarerImpl extends BaseConfigurationDeclarer<LinearDRPCInputDeclarer> implements LinearDRPCInputDeclarer {
+ Component _component;
+
+ public InputDeclarerImpl(Component component) {
+ _component = component;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer fieldsGrouping(final Fields fields) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.fieldsGrouping(prevComponent, fields);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer fieldsGrouping(final String streamId, final Fields fields) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.fieldsGrouping(prevComponent, streamId, fields);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer globalGrouping() {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.globalGrouping(prevComponent);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer globalGrouping(final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.globalGrouping(prevComponent, streamId);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer shuffleGrouping() {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.shuffleGrouping(prevComponent);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer shuffleGrouping(final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.shuffleGrouping(prevComponent, streamId);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer localOrShuffleGrouping() {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.localOrShuffleGrouping(prevComponent);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer localOrShuffleGrouping(final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.localOrShuffleGrouping(prevComponent, streamId);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer noneGrouping() {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.noneGrouping(prevComponent);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer noneGrouping(final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.noneGrouping(prevComponent, streamId);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer allGrouping() {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.allGrouping(prevComponent);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer allGrouping(final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.allGrouping(prevComponent, streamId);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer directGrouping() {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.directGrouping(prevComponent);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer directGrouping(final String streamId) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.directGrouping(prevComponent, streamId);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer partialKeyGrouping(Fields fields) {
+ return customGrouping(new PartialKeyGrouping(fields));
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer partialKeyGrouping(String streamId, Fields fields) {
+ return customGrouping(streamId, new PartialKeyGrouping(fields));
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer customGrouping(final CustomStreamGrouping grouping) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.customGrouping(prevComponent, grouping);
+ }
+ });
+ return this;
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer customGrouping(final String streamId, final CustomStreamGrouping grouping) {
+ addDeclaration(new InputDeclaration() {
+ @Override
+ public void declare(String prevComponent, InputDeclarer declarer) {
+ declarer.customGrouping(prevComponent, streamId, grouping);
+ }
+ });
+ return this;
+ }
+
+ private void addDeclaration(InputDeclaration declaration) {
+ _component.declarations.add(declaration);
+ }
+
+ @Override
+ public LinearDRPCInputDeclarer addConfigurations(Map conf) {
+ _component.componentConfs.add(conf);
+ return this;
+ }
+ }
+}
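A short usage sketch of the builder (ExclaimBolt is a made-up example bolt; note that the last bolt must emit exactly two fields, the request id followed by the result, as enforced in createTopology above):

    import backtype.storm.drpc.LinearDRPCTopologyBuilder;
    import backtype.storm.generated.StormTopology;
    import backtype.storm.topology.BasicOutputCollector;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseBasicBolt;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.tuple.Values;

    public class ExclamationDRPCSketch {

        // The first bolt receives ("request", "args") from PrepareRequest's args stream.
        public static class ExclaimBolt extends BaseBasicBolt {
            @Override
            public void execute(Tuple tuple, BasicOutputCollector collector) {
                Object requestId = tuple.getValue(0);
                String args = tuple.getString(1);
                collector.emit(new Values(requestId, args + "!"));
            }

            @Override
            public void declareOutputFields(OutputFieldsDeclarer declarer) {
                declarer.declare(new Fields("id", "result")); // request id, then result
            }
        }

        public static StormTopology buildTopology() {
            LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("exclamation");
            builder.addBolt(new ExclaimBolt(), 3);
            // createLocalTopology(new LocalDRPC()) can be used instead for local testing.
            return builder.createRemoteTopology();
        }
    }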
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/PrepareRequest.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/PrepareRequest.java b/jstorm-core/src/main/java/backtype/storm/drpc/PrepareRequest.java
new file mode 100755
index 0000000..bd32169
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/PrepareRequest.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.BasicOutputCollector;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseBasicBolt;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.tuple.Values;
+import java.util.Map;
+import java.util.Random;
+import backtype.storm.utils.Utils;
+
+
+public class PrepareRequest extends BaseBasicBolt {
+ public static final String ARGS_STREAM = Utils.DEFAULT_STREAM_ID;
+ public static final String RETURN_STREAM = "ret";
+ public static final String ID_STREAM = "id";
+
+ Random rand;
+
+ @Override
+ public void prepare(Map map, TopologyContext context) {
+ rand = new Random();
+ }
+
+ @Override
+ public void execute(Tuple tuple, BasicOutputCollector collector) {
+ String args = tuple.getString(0);
+ String returnInfo = tuple.getString(1);
+ long requestId = rand.nextLong();
+ collector.emit(ARGS_STREAM, new Values(requestId, args));
+ collector.emit(RETURN_STREAM, new Values(requestId, returnInfo));
+ collector.emit(ID_STREAM, new Values(requestId));
+ }
+
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ declarer.declareStream(ARGS_STREAM, new Fields("request", "args"));
+ declarer.declareStream(RETURN_STREAM, new Fields("request", "return"));
+ declarer.declareStream(ID_STREAM, new Fields("request"));
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/drpc/ReturnResults.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/drpc/ReturnResults.java b/jstorm-core/src/main/java/backtype/storm/drpc/ReturnResults.java
new file mode 100644
index 0000000..2ca517e
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/drpc/ReturnResults.java
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.drpc;
+
+import backtype.storm.Config;
+import backtype.storm.generated.DistributedRPCInvocations;
+import backtype.storm.generated.AuthorizationException;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.topology.base.BaseRichBolt;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.utils.ServiceRegistry;
+import backtype.storm.utils.Utils;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.thrift.TException;
+import org.apache.thrift.transport.TTransportException;
+import org.json.simple.JSONValue;
+
+
+public class ReturnResults extends BaseRichBolt {
+ //ANY CHANGE TO THIS CODE MUST BE SERIALIZABLE COMPATIBLE OR THERE WILL BE PROBLEMS
+ static final long serialVersionUID = -774882142710631591L;
+
+ public static final Logger LOG = LoggerFactory.getLogger(ReturnResults.class);
+ OutputCollector _collector;
+ boolean local;
+ Map _conf;
+ Map<List, DRPCInvocationsClient> _clients = new HashMap<List, DRPCInvocationsClient>();
+
+ @Override
+ public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
+ _conf = stormConf;
+ _collector = collector;
+ local = stormConf.get(Config.STORM_CLUSTER_MODE).equals("local");
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ String result = (String) input.getValue(0);
+ String returnInfo = (String) input.getValue(1);
+ //LOG.info("Receive one message, resultInfo:{}, result:{}", returnInfo, result);
+ if(returnInfo!=null) {
+ Map retMap = (Map) JSONValue.parse(returnInfo);
+ final String host = (String) retMap.get("host");
+ final int port = Utils.getInt(retMap.get("port"));
+ String id = (String) retMap.get("id");
+ DistributedRPCInvocations.Iface client;
+ if(local) {
+ client = (DistributedRPCInvocations.Iface) ServiceRegistry.getService(host);
+ } else {
+ List server = new ArrayList() {{
+ add(host);
+ add(port);
+ }};
+
+ if(!_clients.containsKey(server)) {
+ try {
+ _clients.put(server, new DRPCInvocationsClient(_conf, host, port));
+ } catch (TTransportException ex) {
+ throw new RuntimeException(ex);
+ }
+ }
+ client = _clients.get(server);
+ }
+
+ try {
+ client.result(id, result);
+ _collector.ack(input);
+ } catch (AuthorizationException aze) {
+ LOG.error("Not authorized to return results to DRPC server", aze);
+ _collector.fail(input);
+ if (client instanceof DRPCInvocationsClient) {
+ try {
+ LOG.info("reconnecting... ");
+ ((DRPCInvocationsClient)client).reconnectClient(); //Blocking call
+ } catch (TException e2) {
+ throw new RuntimeException(e2);
+ }
+ }
+ } catch(TException e) {
+ LOG.error("Failed to return results to DRPC server", e);
+ _collector.fail(input);
+ if (client instanceof DRPCInvocationsClient) {
+ try {
+ LOG.info("reconnecting... ");
+ ((DRPCInvocationsClient)client).reconnectClient(); //Blocking call
+ } catch (TException e2) {
+ throw new RuntimeException(e2);
+ }
+ }
+ }
+ }
+ }
+
+ @Override
+ public void cleanup() {
+ for(DRPCInvocationsClient c: _clients.values()) {
+ c.close();
+ }
+ }
+
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/AlreadyAliveException.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/AlreadyAliveException.java b/jstorm-core/src/main/java/backtype/storm/generated/AlreadyAliveException.java
new file mode 100644
index 0000000..06eadaf
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/AlreadyAliveException.java
@@ -0,0 +1,389 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class AlreadyAliveException extends TException implements org.apache.thrift.TBase<AlreadyAliveException, AlreadyAliveException._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyAliveException> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyAliveException");
+
+ private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new AlreadyAliveExceptionStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new AlreadyAliveExceptionTupleSchemeFactory());
+ }
+
+ private String msg; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ MSG((short)1, "msg");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // MSG
+ return MSG;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AlreadyAliveException.class, metaDataMap);
+ }
+
+ public AlreadyAliveException() {
+ }
+
+ public AlreadyAliveException(
+ String msg)
+ {
+ this();
+ this.msg = msg;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public AlreadyAliveException(AlreadyAliveException other) {
+ if (other.is_set_msg()) {
+ this.msg = other.msg;
+ }
+ }
+
+ public AlreadyAliveException deepCopy() {
+ return new AlreadyAliveException(this);
+ }
+
+ @Override
+ public void clear() {
+ this.msg = null;
+ }
+
+ public String get_msg() {
+ return this.msg;
+ }
+
+ public void set_msg(String msg) {
+ this.msg = msg;
+ }
+
+ public void unset_msg() {
+ this.msg = null;
+ }
+
+ /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+ public boolean is_set_msg() {
+ return this.msg != null;
+ }
+
+ public void set_msg_isSet(boolean value) {
+ if (!value) {
+ this.msg = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case MSG:
+ if (value == null) {
+ unset_msg();
+ } else {
+ set_msg((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case MSG:
+ return get_msg();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case MSG:
+ return is_set_msg();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof AlreadyAliveException)
+ return this.equals((AlreadyAliveException)that);
+ return false;
+ }
+
+ public boolean equals(AlreadyAliveException that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_msg = true && this.is_set_msg();
+ boolean that_present_msg = true && that.is_set_msg();
+ if (this_present_msg || that_present_msg) {
+ if (!(this_present_msg && that_present_msg))
+ return false;
+ if (!this.msg.equals(that.msg))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_msg = true && (is_set_msg());
+ list.add(present_msg);
+ if (present_msg)
+ list.add(msg);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(AlreadyAliveException other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_msg()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("AlreadyAliveException(");
+ boolean first = true;
+
+ sb.append("msg:");
+ if (this.msg == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.msg);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_msg()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class AlreadyAliveExceptionStandardSchemeFactory implements SchemeFactory {
+ public AlreadyAliveExceptionStandardScheme getScheme() {
+ return new AlreadyAliveExceptionStandardScheme();
+ }
+ }
+
+ private static class AlreadyAliveExceptionStandardScheme extends StandardScheme<AlreadyAliveException> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, AlreadyAliveException struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // MSG
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, AlreadyAliveException struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.msg != null) {
+ oprot.writeFieldBegin(MSG_FIELD_DESC);
+ oprot.writeString(struct.msg);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class AlreadyAliveExceptionTupleSchemeFactory implements SchemeFactory {
+ public AlreadyAliveExceptionTupleScheme getScheme() {
+ return new AlreadyAliveExceptionTupleScheme();
+ }
+ }
+
+ private static class AlreadyAliveExceptionTupleScheme extends TupleScheme<AlreadyAliveException> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, AlreadyAliveException struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.msg);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, AlreadyAliveException struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ }
+ }
+
+}
+
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/AuthorizationException.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/AuthorizationException.java b/jstorm-core/src/main/java/backtype/storm/generated/AuthorizationException.java
new file mode 100644
index 0000000..02f72f0
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/AuthorizationException.java
@@ -0,0 +1,389 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class AuthorizationException extends TException implements org.apache.thrift.TBase<AuthorizationException, AuthorizationException._Fields>, java.io.Serializable, Cloneable, Comparable<AuthorizationException> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AuthorizationException");
+
+ private static final org.apache.thrift.protocol.TField MSG_FIELD_DESC = new org.apache.thrift.protocol.TField("msg", org.apache.thrift.protocol.TType.STRING, (short)1);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new AuthorizationExceptionStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new AuthorizationExceptionTupleSchemeFactory());
+ }
+
+ private String msg; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ MSG((short)1, "msg");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // MSG
+ return MSG;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.MSG, new org.apache.thrift.meta_data.FieldMetaData("msg", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(AuthorizationException.class, metaDataMap);
+ }
+
+ public AuthorizationException() {
+ }
+
+ public AuthorizationException(
+ String msg)
+ {
+ this();
+ this.msg = msg;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public AuthorizationException(AuthorizationException other) {
+ if (other.is_set_msg()) {
+ this.msg = other.msg;
+ }
+ }
+
+ public AuthorizationException deepCopy() {
+ return new AuthorizationException(this);
+ }
+
+ @Override
+ public void clear() {
+ this.msg = null;
+ }
+
+ public String get_msg() {
+ return this.msg;
+ }
+
+ public void set_msg(String msg) {
+ this.msg = msg;
+ }
+
+ public void unset_msg() {
+ this.msg = null;
+ }
+
+ /** Returns true if field msg is set (has been assigned a value) and false otherwise */
+ public boolean is_set_msg() {
+ return this.msg != null;
+ }
+
+ public void set_msg_isSet(boolean value) {
+ if (!value) {
+ this.msg = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case MSG:
+ if (value == null) {
+ unset_msg();
+ } else {
+ set_msg((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case MSG:
+ return get_msg();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case MSG:
+ return is_set_msg();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof AuthorizationException)
+ return this.equals((AuthorizationException)that);
+ return false;
+ }
+
+ public boolean equals(AuthorizationException that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_msg = true && this.is_set_msg();
+ boolean that_present_msg = true && that.is_set_msg();
+ if (this_present_msg || that_present_msg) {
+ if (!(this_present_msg && that_present_msg))
+ return false;
+ if (!this.msg.equals(that.msg))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_msg = true && (is_set_msg());
+ list.add(present_msg);
+ if (present_msg)
+ list.add(msg);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(AuthorizationException other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_msg()).compareTo(other.is_set_msg());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_msg()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.msg, other.msg);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("AuthorizationException(");
+ boolean first = true;
+
+ sb.append("msg:");
+ if (this.msg == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.msg);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_msg()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'msg' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class AuthorizationExceptionStandardSchemeFactory implements SchemeFactory {
+ public AuthorizationExceptionStandardScheme getScheme() {
+ return new AuthorizationExceptionStandardScheme();
+ }
+ }
+
+ private static class AuthorizationExceptionStandardScheme extends StandardScheme<AuthorizationException> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, AuthorizationException struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // MSG
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, AuthorizationException struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.msg != null) {
+ oprot.writeFieldBegin(MSG_FIELD_DESC);
+ oprot.writeString(struct.msg);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class AuthorizationExceptionTupleSchemeFactory implements SchemeFactory {
+ public AuthorizationExceptionTupleScheme getScheme() {
+ return new AuthorizationExceptionTupleScheme();
+ }
+ }
+
+ private static class AuthorizationExceptionTupleScheme extends TupleScheme<AuthorizationException> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, AuthorizationException struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ oprot.writeString(struct.msg);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, AuthorizationException struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.msg = iprot.readString();
+ struct.set_msg_isSet(true);
+ }
+ }
+
+}
+
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/Bolt.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/Bolt.java b/jstorm-core/src/main/java/backtype/storm/generated/Bolt.java
new file mode 100644
index 0000000..e3d0a07
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/Bolt.java
@@ -0,0 +1,497 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class Bolt implements org.apache.thrift.TBase<Bolt, Bolt._Fields>, java.io.Serializable, Cloneable, Comparable<Bolt> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Bolt");
+
+ private static final org.apache.thrift.protocol.TField BOLT_OBJECT_FIELD_DESC = new org.apache.thrift.protocol.TField("bolt_object", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+ private static final org.apache.thrift.protocol.TField COMMON_FIELD_DESC = new org.apache.thrift.protocol.TField("common", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new BoltStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new BoltTupleSchemeFactory());
+ }
+
+ private ComponentObject bolt_object; // required
+ private ComponentCommon common; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ BOLT_OBJECT((short)1, "bolt_object"),
+ COMMON((short)2, "common");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if it's not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // BOLT_OBJECT
+ return BOLT_OBJECT;
+ case 2: // COMMON
+ return COMMON;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if it's not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.BOLT_OBJECT, new org.apache.thrift.meta_data.FieldMetaData("bolt_object", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ComponentObject.class)));
+ tmpMap.put(_Fields.COMMON, new org.apache.thrift.meta_data.FieldMetaData("common", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ComponentCommon.class)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Bolt.class, metaDataMap);
+ }
+
+ public Bolt() {
+ }
+
+ public Bolt(
+ ComponentObject bolt_object,
+ ComponentCommon common)
+ {
+ this();
+ this.bolt_object = bolt_object;
+ this.common = common;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public Bolt(Bolt other) {
+ if (other.is_set_bolt_object()) {
+ this.bolt_object = new ComponentObject(other.bolt_object);
+ }
+ if (other.is_set_common()) {
+ this.common = new ComponentCommon(other.common);
+ }
+ }
+
+ public Bolt deepCopy() {
+ return new Bolt(this);
+ }
+
+ @Override
+ public void clear() {
+ this.bolt_object = null;
+ this.common = null;
+ }
+
+ public ComponentObject get_bolt_object() {
+ return this.bolt_object;
+ }
+
+ public void set_bolt_object(ComponentObject bolt_object) {
+ this.bolt_object = bolt_object;
+ }
+
+ public void unset_bolt_object() {
+ this.bolt_object = null;
+ }
+
+ /** Returns true if field bolt_object is set (has been assigned a value) and false otherwise */
+ public boolean is_set_bolt_object() {
+ return this.bolt_object != null;
+ }
+
+ public void set_bolt_object_isSet(boolean value) {
+ if (!value) {
+ this.bolt_object = null;
+ }
+ }
+
+ public ComponentCommon get_common() {
+ return this.common;
+ }
+
+ public void set_common(ComponentCommon common) {
+ this.common = common;
+ }
+
+ public void unset_common() {
+ this.common = null;
+ }
+
+ /** Returns true if field common is set (has been assigned a value) and false otherwise */
+ public boolean is_set_common() {
+ return this.common != null;
+ }
+
+ public void set_common_isSet(boolean value) {
+ if (!value) {
+ this.common = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case BOLT_OBJECT:
+ if (value == null) {
+ unset_bolt_object();
+ } else {
+ set_bolt_object((ComponentObject)value);
+ }
+ break;
+
+ case COMMON:
+ if (value == null) {
+ unset_common();
+ } else {
+ set_common((ComponentCommon)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case BOLT_OBJECT:
+ return get_bolt_object();
+
+ case COMMON:
+ return get_common();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case BOLT_OBJECT:
+ return is_set_bolt_object();
+ case COMMON:
+ return is_set_common();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof Bolt)
+ return this.equals((Bolt)that);
+ return false;
+ }
+
+ public boolean equals(Bolt that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_bolt_object = true && this.is_set_bolt_object();
+ boolean that_present_bolt_object = true && that.is_set_bolt_object();
+ if (this_present_bolt_object || that_present_bolt_object) {
+ if (!(this_present_bolt_object && that_present_bolt_object))
+ return false;
+ if (!this.bolt_object.equals(that.bolt_object))
+ return false;
+ }
+
+ boolean this_present_common = true && this.is_set_common();
+ boolean that_present_common = true && that.is_set_common();
+ if (this_present_common || that_present_common) {
+ if (!(this_present_common && that_present_common))
+ return false;
+ if (!this.common.equals(that.common))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_bolt_object = true && (is_set_bolt_object());
+ list.add(present_bolt_object);
+ if (present_bolt_object)
+ list.add(bolt_object);
+
+ boolean present_common = true && (is_set_common());
+ list.add(present_common);
+ if (present_common)
+ list.add(common);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(Bolt other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_bolt_object()).compareTo(other.is_set_bolt_object());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_bolt_object()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.bolt_object, other.bolt_object);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_common()).compareTo(other.is_set_common());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_common()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.common, other.common);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("Bolt(");
+ boolean first = true;
+
+ sb.append("bolt_object:");
+ if (this.bolt_object == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.bolt_object);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("common:");
+ if (this.common == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.common);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_bolt_object()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'bolt_object' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_common()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'common' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ if (common != null) {
+ common.validate();
+ }
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class BoltStandardSchemeFactory implements SchemeFactory {
+ public BoltStandardScheme getScheme() {
+ return new BoltStandardScheme();
+ }
+ }
+
+ private static class BoltStandardScheme extends StandardScheme<Bolt> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, Bolt struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // BOLT_OBJECT
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.bolt_object = new ComponentObject();
+ struct.bolt_object.read(iprot);
+ struct.set_bolt_object_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // COMMON
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.common = new ComponentCommon();
+ struct.common.read(iprot);
+ struct.set_common_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, Bolt struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.bolt_object != null) {
+ oprot.writeFieldBegin(BOLT_OBJECT_FIELD_DESC);
+ struct.bolt_object.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ if (struct.common != null) {
+ oprot.writeFieldBegin(COMMON_FIELD_DESC);
+ struct.common.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class BoltTupleSchemeFactory implements SchemeFactory {
+ public BoltTupleScheme getScheme() {
+ return new BoltTupleScheme();
+ }
+ }
+
+ private static class BoltTupleScheme extends TupleScheme<Bolt> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, Bolt struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ struct.bolt_object.write(oprot);
+ struct.common.write(oprot);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, Bolt struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.bolt_object = new ComponentObject();
+ struct.bolt_object.read(iprot);
+ struct.set_bolt_object_isSet(true);
+ struct.common = new ComponentCommon();
+ struct.common.read(iprot);
+ struct.set_common_isSet(true);
+ }
+ }
+
+}
+
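The Bolt struct added above simply pairs a ComponentObject with its ComponentCommon; both fields are declared REQUIRED in the metadata map, so validate() rejects an instance that is missing either one. A minimal sketch using only the constructors and methods visible in this diff:

    import backtype.storm.generated.Bolt;
    import backtype.storm.generated.ComponentCommon;
    import backtype.storm.generated.ComponentObject;

    public class BoltValidationSketch {
        public static void main(String[] args) {
            Bolt empty = new Bolt();                      // no-arg constructor: nothing set
            try {
                empty.validate();                         // required-field check from the generated code
            } catch (org.apache.thrift.TException e) {
                System.out.println(e.getMessage());       // "Required field 'bolt_object' is unset! ..."
            }

            // Both required fields supplied via the two-argument constructor.
            Bolt bolt = new Bolt(new ComponentObject(), new ComponentCommon());
            System.out.println(bolt.is_set_bolt_object() && bolt.is_set_common());  // true
        }
    }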
[15/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
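The diff below (continuing through the rest of this message) deletes the old Thrift-0.7 Python client for Nimbus. Its Iface remains a useful map of the Nimbus RPC surface: topology lifecycle calls (submitTopology, killTopology, activate, deactivate, rebalance), the chunked jar-upload handshake (beginFileUpload, uploadChunk, finishFileUpload), and read-only queries (getNimbusConf, getClusterInfo, getTopologyInfo, and so on). As a rough Java orientation only -- the generated backtype.storm.generated.Nimbus.Client is not part of this hunk, and the host, port, and method signatures here are assumptions rather than the project's documented API -- the upload handshake looks roughly like this:

    import java.nio.ByteBuffer;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.transport.TFramedTransport;
    import org.apache.thrift.transport.TSocket;

    import backtype.storm.generated.Nimbus;

    public class JarUploadSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder host/port; the actual transport setup is an assumption.
            TFramedTransport transport = new TFramedTransport(new TSocket("nimbus-host", 7627));
            transport.open();
            Nimbus.Client client = new Nimbus.Client(new TBinaryProtocol(transport));

            // beginFileUpload -> uploadChunk* -> finishFileUpload, mirroring the Python Iface below.
            String location = client.beginFileUpload();
            byte[] jar = Files.readAllBytes(Paths.get("target/topology.jar"));
            client.uploadChunk(location, ByteBuffer.wrap(jar));   // real callers split large jars into chunks
            client.finishFileUpload(location);

            transport.close();
        }
    }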
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/Nimbus.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/Nimbus.py b/jstorm-client/src/main/py/storm/Nimbus.py
deleted file mode 100644
index 9f382a5..0000000
--- a/jstorm-client/src/main/py/storm/Nimbus.py
+++ /dev/null
@@ -1,4149 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-from thrift.Thrift import *
-from ttypes import *
-from thrift.Thrift import TProcessor
-from thrift.transport import TTransport
-from thrift.protocol import TBinaryProtocol, TProtocol
-try:
- from thrift.protocol import fastbinary
-except:
- fastbinary = None
-
-
-class Iface:
- def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
- """
- Parameters:
- - name
- - uploadedJarLocation
- - jsonConf
- - topology
- """
- pass
-
- def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
- """
- Parameters:
- - name
- - uploadedJarLocation
- - jsonConf
- - topology
- - options
- """
- pass
-
- def killTopology(self, name):
- """
- Parameters:
- - name
- """
- pass
-
- def killTopologyWithOpts(self, name, options):
- """
- Parameters:
- - name
- - options
- """
- pass
-
- def activate(self, name):
- """
- Parameters:
- - name
- """
- pass
-
- def deactivate(self, name):
- """
- Parameters:
- - name
- """
- pass
-
- def rebalance(self, name, options):
- """
- Parameters:
- - name
- - options
- """
- pass
-
- def metricMonitor(self, name, options):
- """
- Parameters:
- - name
- - options
- """
- pass
-
- def beginLibUpload(self, libName):
- """
- Parameters:
- - libName
- """
- pass
-
- def beginFileUpload(self, ):
- pass
-
- def uploadChunk(self, location, chunk):
- """
- Parameters:
- - location
- - chunk
- """
- pass
-
- def finishFileUpload(self, location):
- """
- Parameters:
- - location
- """
- pass
-
- def beginFileDownload(self, file):
- """
- Parameters:
- - file
- """
- pass
-
- def downloadChunk(self, id):
- """
- Parameters:
- - id
- """
- pass
-
- def getNimbusConf(self, ):
- pass
-
- def getClusterInfo(self, ):
- pass
-
- def getTopologyInfo(self, id):
- """
- Parameters:
- - id
- """
- pass
-
- def getSupervisorWorkers(self, host):
- """
- Parameters:
- - host
- """
- pass
-
- def getTopologyConf(self, id):
- """
- Parameters:
- - id
- """
- pass
-
- def getTopology(self, id):
- """
- Parameters:
- - id
- """
- pass
-
- def getUserTopology(self, id):
- """
- Parameters:
- - id
- """
- pass
-
- def getTopologyMetric(self, id):
- """
- Parameters:
- - id
- """
- pass
-
-
-class Client(Iface):
- def __init__(self, iprot, oprot=None):
- self._iprot = self._oprot = iprot
- if oprot is not None:
- self._oprot = oprot
- self._seqid = 0
-
- def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
- """
- Parameters:
- - name
- - uploadedJarLocation
- - jsonConf
- - topology
- """
- self.send_submitTopology(name, uploadedJarLocation, jsonConf, topology)
- self.recv_submitTopology()
-
- def send_submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
- self._oprot.writeMessageBegin('submitTopology', TMessageType.CALL, self._seqid)
- args = submitTopology_args()
- args.name = name
- args.uploadedJarLocation = uploadedJarLocation
- args.jsonConf = jsonConf
- args.topology = topology
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_submitTopology(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = submitTopology_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- if result.ite is not None:
- raise result.ite
- if result.tae is not None:
- raise result.tae
- return
-
- def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
- """
- Parameters:
- - name
- - uploadedJarLocation
- - jsonConf
- - topology
- - options
- """
- self.send_submitTopologyWithOpts(name, uploadedJarLocation, jsonConf, topology, options)
- self.recv_submitTopologyWithOpts()
-
- def send_submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
- self._oprot.writeMessageBegin('submitTopologyWithOpts', TMessageType.CALL, self._seqid)
- args = submitTopologyWithOpts_args()
- args.name = name
- args.uploadedJarLocation = uploadedJarLocation
- args.jsonConf = jsonConf
- args.topology = topology
- args.options = options
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_submitTopologyWithOpts(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = submitTopologyWithOpts_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- if result.ite is not None:
- raise result.ite
- if result.tae is not None:
- raise result.tae
- return
-
- def killTopology(self, name):
- """
- Parameters:
- - name
- """
- self.send_killTopology(name)
- self.recv_killTopology()
-
- def send_killTopology(self, name):
- self._oprot.writeMessageBegin('killTopology', TMessageType.CALL, self._seqid)
- args = killTopology_args()
- args.name = name
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_killTopology(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = killTopology_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- return
-
- def killTopologyWithOpts(self, name, options):
- """
- Parameters:
- - name
- - options
- """
- self.send_killTopologyWithOpts(name, options)
- self.recv_killTopologyWithOpts()
-
- def send_killTopologyWithOpts(self, name, options):
- self._oprot.writeMessageBegin('killTopologyWithOpts', TMessageType.CALL, self._seqid)
- args = killTopologyWithOpts_args()
- args.name = name
- args.options = options
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_killTopologyWithOpts(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = killTopologyWithOpts_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- return
-
- def activate(self, name):
- """
- Parameters:
- - name
- """
- self.send_activate(name)
- self.recv_activate()
-
- def send_activate(self, name):
- self._oprot.writeMessageBegin('activate', TMessageType.CALL, self._seqid)
- args = activate_args()
- args.name = name
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_activate(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = activate_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- return
-
- def deactivate(self, name):
- """
- Parameters:
- - name
- """
- self.send_deactivate(name)
- self.recv_deactivate()
-
- def send_deactivate(self, name):
- self._oprot.writeMessageBegin('deactivate', TMessageType.CALL, self._seqid)
- args = deactivate_args()
- args.name = name
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_deactivate(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = deactivate_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- return
-
- def rebalance(self, name, options):
- """
- Parameters:
- - name
- - options
- """
- self.send_rebalance(name, options)
- self.recv_rebalance()
-
- def send_rebalance(self, name, options):
- self._oprot.writeMessageBegin('rebalance', TMessageType.CALL, self._seqid)
- args = rebalance_args()
- args.name = name
- args.options = options
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_rebalance(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = rebalance_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- if result.ite is not None:
- raise result.ite
- return
-
- def metricMonitor(self, name, options):
- """
- Parameters:
- - name
- - options
- """
- self.send_metricMonitor(name, options)
- self.recv_metricMonitor()
-
- def send_metricMonitor(self, name, options):
- self._oprot.writeMessageBegin('metricMonitor', TMessageType.CALL, self._seqid)
- args = metricMonitor_args()
- args.name = name
- args.options = options
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_metricMonitor(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = metricMonitor_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.e is not None:
- raise result.e
- return
-
- def beginLibUpload(self, libName):
- """
- Parameters:
- - libName
- """
- self.send_beginLibUpload(libName)
- self.recv_beginLibUpload()
-
- def send_beginLibUpload(self, libName):
- self._oprot.writeMessageBegin('beginLibUpload', TMessageType.CALL, self._seqid)
- args = beginLibUpload_args()
- args.libName = libName
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_beginLibUpload(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = beginLibUpload_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- return
-
- def beginFileUpload(self, ):
- self.send_beginFileUpload()
- return self.recv_beginFileUpload()
-
- def send_beginFileUpload(self, ):
- self._oprot.writeMessageBegin('beginFileUpload', TMessageType.CALL, self._seqid)
- args = beginFileUpload_args()
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_beginFileUpload(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = beginFileUpload_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileUpload failed: unknown result");
-
- def uploadChunk(self, location, chunk):
- """
- Parameters:
- - location
- - chunk
- """
- self.send_uploadChunk(location, chunk)
- self.recv_uploadChunk()
-
- def send_uploadChunk(self, location, chunk):
- self._oprot.writeMessageBegin('uploadChunk', TMessageType.CALL, self._seqid)
- args = uploadChunk_args()
- args.location = location
- args.chunk = chunk
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_uploadChunk(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = uploadChunk_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- return
-
- def finishFileUpload(self, location):
- """
- Parameters:
- - location
- """
- self.send_finishFileUpload(location)
- self.recv_finishFileUpload()
-
- def send_finishFileUpload(self, location):
- self._oprot.writeMessageBegin('finishFileUpload', TMessageType.CALL, self._seqid)
- args = finishFileUpload_args()
- args.location = location
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_finishFileUpload(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = finishFileUpload_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- return
-
- def beginFileDownload(self, file):
- """
- Parameters:
- - file
- """
- self.send_beginFileDownload(file)
- return self.recv_beginFileDownload()
-
- def send_beginFileDownload(self, file):
- self._oprot.writeMessageBegin('beginFileDownload', TMessageType.CALL, self._seqid)
- args = beginFileDownload_args()
- args.file = file
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_beginFileDownload(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = beginFileDownload_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileDownload failed: unknown result");
-
- def downloadChunk(self, id):
- """
- Parameters:
- - id
- """
- self.send_downloadChunk(id)
- return self.recv_downloadChunk()
-
- def send_downloadChunk(self, id):
- self._oprot.writeMessageBegin('downloadChunk', TMessageType.CALL, self._seqid)
- args = downloadChunk_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_downloadChunk(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = downloadChunk_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadChunk failed: unknown result");
-
- def getNimbusConf(self, ):
- self.send_getNimbusConf()
- return self.recv_getNimbusConf()
-
- def send_getNimbusConf(self, ):
- self._oprot.writeMessageBegin('getNimbusConf', TMessageType.CALL, self._seqid)
- args = getNimbusConf_args()
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getNimbusConf(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getNimbusConf_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getNimbusConf failed: unknown result");
-
- def getClusterInfo(self, ):
- self.send_getClusterInfo()
- return self.recv_getClusterInfo()
-
- def send_getClusterInfo(self, ):
- self._oprot.writeMessageBegin('getClusterInfo', TMessageType.CALL, self._seqid)
- args = getClusterInfo_args()
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getClusterInfo(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getClusterInfo_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterInfo failed: unknown result");
-
- def getTopologyInfo(self, id):
- """
- Parameters:
- - id
- """
- self.send_getTopologyInfo(id)
- return self.recv_getTopologyInfo()
-
- def send_getTopologyInfo(self, id):
- self._oprot.writeMessageBegin('getTopologyInfo', TMessageType.CALL, self._seqid)
- args = getTopologyInfo_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getTopologyInfo(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getTopologyInfo_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyInfo failed: unknown result");
-
- def getSupervisorWorkers(self, host):
- """
- Parameters:
- - host
- """
- self.send_getSupervisorWorkers(host)
- return self.recv_getSupervisorWorkers()
-
- def send_getSupervisorWorkers(self, host):
- self._oprot.writeMessageBegin('getSupervisorWorkers', TMessageType.CALL, self._seqid)
- args = getSupervisorWorkers_args()
- args.host = host
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getSupervisorWorkers(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getSupervisorWorkers_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getSupervisorWorkers failed: unknown result");
-
- def getTopologyConf(self, id):
- """
- Parameters:
- - id
- """
- self.send_getTopologyConf(id)
- return self.recv_getTopologyConf()
-
- def send_getTopologyConf(self, id):
- self._oprot.writeMessageBegin('getTopologyConf', TMessageType.CALL, self._seqid)
- args = getTopologyConf_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getTopologyConf(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getTopologyConf_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyConf failed: unknown result");
-
- def getTopology(self, id):
- """
- Parameters:
- - id
- """
- self.send_getTopology(id)
- return self.recv_getTopology()
-
- def send_getTopology(self, id):
- self._oprot.writeMessageBegin('getTopology', TMessageType.CALL, self._seqid)
- args = getTopology_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getTopology(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getTopology_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopology failed: unknown result");
-
- def getUserTopology(self, id):
- """
- Parameters:
- - id
- """
- self.send_getUserTopology(id)
- return self.recv_getUserTopology()
-
- def send_getUserTopology(self, id):
- self._oprot.writeMessageBegin('getUserTopology', TMessageType.CALL, self._seqid)
- args = getUserTopology_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getUserTopology(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getUserTopology_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getUserTopology failed: unknown result");
-
- def getTopologyMetric(self, id):
- """
- Parameters:
- - id
- """
- self.send_getTopologyMetric(id)
- return self.recv_getTopologyMetric()
-
- def send_getTopologyMetric(self, id):
- self._oprot.writeMessageBegin('getTopologyMetric', TMessageType.CALL, self._seqid)
- args = getTopologyMetric_args()
- args.id = id
- args.write(self._oprot)
- self._oprot.writeMessageEnd()
- self._oprot.trans.flush()
-
- def recv_getTopologyMetric(self, ):
- (fname, mtype, rseqid) = self._iprot.readMessageBegin()
- if mtype == TMessageType.EXCEPTION:
- x = TApplicationException()
- x.read(self._iprot)
- self._iprot.readMessageEnd()
- raise x
- result = getTopologyMetric_result()
- result.read(self._iprot)
- self._iprot.readMessageEnd()
- if result.success is not None:
- return result.success
- if result.e is not None:
- raise result.e
- raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyMetric failed: unknown result");
-
-
-class Processor(Iface, TProcessor):
- def __init__(self, handler):
- self._handler = handler
- self._processMap = {}
- self._processMap["submitTopology"] = Processor.process_submitTopology
- self._processMap["submitTopologyWithOpts"] = Processor.process_submitTopologyWithOpts
- self._processMap["killTopology"] = Processor.process_killTopology
- self._processMap["killTopologyWithOpts"] = Processor.process_killTopologyWithOpts
- self._processMap["activate"] = Processor.process_activate
- self._processMap["deactivate"] = Processor.process_deactivate
- self._processMap["rebalance"] = Processor.process_rebalance
- self._processMap["metricMonitor"] = Processor.process_metricMonitor
- self._processMap["beginLibUpload"] = Processor.process_beginLibUpload
- self._processMap["beginFileUpload"] = Processor.process_beginFileUpload
- self._processMap["uploadChunk"] = Processor.process_uploadChunk
- self._processMap["finishFileUpload"] = Processor.process_finishFileUpload
- self._processMap["beginFileDownload"] = Processor.process_beginFileDownload
- self._processMap["downloadChunk"] = Processor.process_downloadChunk
- self._processMap["getNimbusConf"] = Processor.process_getNimbusConf
- self._processMap["getClusterInfo"] = Processor.process_getClusterInfo
- self._processMap["getTopologyInfo"] = Processor.process_getTopologyInfo
- self._processMap["getSupervisorWorkers"] = Processor.process_getSupervisorWorkers
- self._processMap["getTopologyConf"] = Processor.process_getTopologyConf
- self._processMap["getTopology"] = Processor.process_getTopology
- self._processMap["getUserTopology"] = Processor.process_getUserTopology
- self._processMap["getTopologyMetric"] = Processor.process_getTopologyMetric
-
- def process(self, iprot, oprot):
- (name, type, seqid) = iprot.readMessageBegin()
- if name not in self._processMap:
- iprot.skip(TType.STRUCT)
- iprot.readMessageEnd()
- x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
- oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
- x.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
- return
- else:
- self._processMap[name](self, seqid, iprot, oprot)
- return True
-
- def process_submitTopology(self, seqid, iprot, oprot):
- args = submitTopology_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = submitTopology_result()
- try:
- self._handler.submitTopology(args.name, args.uploadedJarLocation, args.jsonConf, args.topology)
- except AlreadyAliveException, e:
- result.e = e
- except InvalidTopologyException, ite:
- result.ite = ite
- except TopologyAssignException, tae:
- result.tae = tae
- oprot.writeMessageBegin("submitTopology", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_submitTopologyWithOpts(self, seqid, iprot, oprot):
- args = submitTopologyWithOpts_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = submitTopologyWithOpts_result()
- try:
- self._handler.submitTopologyWithOpts(args.name, args.uploadedJarLocation, args.jsonConf, args.topology, args.options)
- except AlreadyAliveException, e:
- result.e = e
- except InvalidTopologyException, ite:
- result.ite = ite
- except TopologyAssignException, tae:
- result.tae = tae
- oprot.writeMessageBegin("submitTopologyWithOpts", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_killTopology(self, seqid, iprot, oprot):
- args = killTopology_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = killTopology_result()
- try:
- self._handler.killTopology(args.name)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("killTopology", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_killTopologyWithOpts(self, seqid, iprot, oprot):
- args = killTopologyWithOpts_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = killTopologyWithOpts_result()
- try:
- self._handler.killTopologyWithOpts(args.name, args.options)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("killTopologyWithOpts", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_activate(self, seqid, iprot, oprot):
- args = activate_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = activate_result()
- try:
- self._handler.activate(args.name)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("activate", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_deactivate(self, seqid, iprot, oprot):
- args = deactivate_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = deactivate_result()
- try:
- self._handler.deactivate(args.name)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("deactivate", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_rebalance(self, seqid, iprot, oprot):
- args = rebalance_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = rebalance_result()
- try:
- self._handler.rebalance(args.name, args.options)
- except NotAliveException, e:
- result.e = e
- except InvalidTopologyException, ite:
- result.ite = ite
- oprot.writeMessageBegin("rebalance", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_metricMonitor(self, seqid, iprot, oprot):
- args = metricMonitor_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = metricMonitor_result()
- try:
- self._handler.metricMonitor(args.name, args.options)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("metricMonitor", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_beginLibUpload(self, seqid, iprot, oprot):
- args = beginLibUpload_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = beginLibUpload_result()
- self._handler.beginLibUpload(args.libName)
- oprot.writeMessageBegin("beginLibUpload", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_beginFileUpload(self, seqid, iprot, oprot):
- args = beginFileUpload_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = beginFileUpload_result()
- result.success = self._handler.beginFileUpload()
- oprot.writeMessageBegin("beginFileUpload", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_uploadChunk(self, seqid, iprot, oprot):
- args = uploadChunk_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = uploadChunk_result()
- self._handler.uploadChunk(args.location, args.chunk)
- oprot.writeMessageBegin("uploadChunk", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_finishFileUpload(self, seqid, iprot, oprot):
- args = finishFileUpload_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = finishFileUpload_result()
- self._handler.finishFileUpload(args.location)
- oprot.writeMessageBegin("finishFileUpload", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_beginFileDownload(self, seqid, iprot, oprot):
- args = beginFileDownload_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = beginFileDownload_result()
- result.success = self._handler.beginFileDownload(args.file)
- oprot.writeMessageBegin("beginFileDownload", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_downloadChunk(self, seqid, iprot, oprot):
- args = downloadChunk_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = downloadChunk_result()
- result.success = self._handler.downloadChunk(args.id)
- oprot.writeMessageBegin("downloadChunk", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getNimbusConf(self, seqid, iprot, oprot):
- args = getNimbusConf_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getNimbusConf_result()
- result.success = self._handler.getNimbusConf()
- oprot.writeMessageBegin("getNimbusConf", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getClusterInfo(self, seqid, iprot, oprot):
- args = getClusterInfo_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getClusterInfo_result()
- result.success = self._handler.getClusterInfo()
- oprot.writeMessageBegin("getClusterInfo", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getTopologyInfo(self, seqid, iprot, oprot):
- args = getTopologyInfo_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getTopologyInfo_result()
- try:
- result.success = self._handler.getTopologyInfo(args.id)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("getTopologyInfo", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getSupervisorWorkers(self, seqid, iprot, oprot):
- args = getSupervisorWorkers_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getSupervisorWorkers_result()
- try:
- result.success = self._handler.getSupervisorWorkers(args.host)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("getSupervisorWorkers", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getTopologyConf(self, seqid, iprot, oprot):
- args = getTopologyConf_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getTopologyConf_result()
- try:
- result.success = self._handler.getTopologyConf(args.id)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("getTopologyConf", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getTopology(self, seqid, iprot, oprot):
- args = getTopology_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getTopology_result()
- try:
- result.success = self._handler.getTopology(args.id)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("getTopology", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getUserTopology(self, seqid, iprot, oprot):
- args = getUserTopology_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getUserTopology_result()
- try:
- result.success = self._handler.getUserTopology(args.id)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("getUserTopology", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
- def process_getTopologyMetric(self, seqid, iprot, oprot):
- args = getTopologyMetric_args()
- args.read(iprot)
- iprot.readMessageEnd()
- result = getTopologyMetric_result()
- try:
- result.success = self._handler.getTopologyMetric(args.id)
- except NotAliveException, e:
- result.e = e
- oprot.writeMessageBegin("getTopologyMetric", TMessageType.REPLY, seqid)
- result.write(oprot)
- oprot.writeMessageEnd()
- oprot.trans.flush()
-
-
-# HELPER FUNCTIONS AND STRUCTURES
-
-class submitTopology_args:
- """
- Attributes:
- - name
- - uploadedJarLocation
- - jsonConf
- - topology
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRING, 'uploadedJarLocation', None, None, ), # 2
- (3, TType.STRING, 'jsonConf', None, None, ), # 3
- (4, TType.STRUCT, 'topology', (StormTopology, StormTopology.thrift_spec), None, ), # 4
- )
-
- def __hash__(self):
- return 0 + hash(self.name) + hash(self.uploadedJarLocation) + hash(self.jsonConf) + hash(self.topology)
-
- def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None,):
- self.name = name
- self.uploadedJarLocation = uploadedJarLocation
- self.jsonConf = jsonConf
- self.topology = topology
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.uploadedJarLocation = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRING:
- self.jsonConf = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.STRUCT:
- self.topology = StormTopology()
- self.topology.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('submitTopology_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.uploadedJarLocation is not None:
- oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
- oprot.writeString(self.uploadedJarLocation.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.jsonConf is not None:
- oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
- oprot.writeString(self.jsonConf.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.topology is not None:
- oprot.writeFieldBegin('topology', TType.STRUCT, 4)
- self.topology.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class submitTopology_result:
- """
- Attributes:
- - e
- - ite
- - tae
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (AlreadyAliveException, AlreadyAliveException.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
- (3, TType.STRUCT, 'tae', (TopologyAssignException, TopologyAssignException.thrift_spec), None, ), # 3
- )
-
- def __hash__(self):
- return 0 + hash(self.e) + hash(self.ite) + hash(self.tae)
-
- def __init__(self, e=None, ite=None, tae=None,):
- self.e = e
- self.ite = ite
- self.tae = tae
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = AlreadyAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.ite = InvalidTopologyException()
- self.ite.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRUCT:
- self.tae = TopologyAssignException()
- self.tae.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('submitTopology_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- if self.ite is not None:
- oprot.writeFieldBegin('ite', TType.STRUCT, 2)
- self.ite.write(oprot)
- oprot.writeFieldEnd()
- if self.tae is not None:
- oprot.writeFieldBegin('tae', TType.STRUCT, 3)
- self.tae.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class submitTopologyWithOpts_args:
- """
- Attributes:
- - name
- - uploadedJarLocation
- - jsonConf
- - topology
- - options
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRING, 'uploadedJarLocation', None, None, ), # 2
- (3, TType.STRING, 'jsonConf', None, None, ), # 3
- (4, TType.STRUCT, 'topology', (StormTopology, StormTopology.thrift_spec), None, ), # 4
- (5, TType.STRUCT, 'options', (SubmitOptions, SubmitOptions.thrift_spec), None, ), # 5
- )
-
- def __hash__(self):
- return 0 + hash(self.name) + hash(self.uploadedJarLocation) + hash(self.jsonConf) + hash(self.topology) + hash(self.options)
-
- def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None, options=None,):
- self.name = name
- self.uploadedJarLocation = uploadedJarLocation
- self.jsonConf = jsonConf
- self.topology = topology
- self.options = options
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.uploadedJarLocation = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRING:
- self.jsonConf = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.STRUCT:
- self.topology = StormTopology()
- self.topology.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 5:
- if ftype == TType.STRUCT:
- self.options = SubmitOptions()
- self.options.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('submitTopologyWithOpts_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.uploadedJarLocation is not None:
- oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
- oprot.writeString(self.uploadedJarLocation.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.jsonConf is not None:
- oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
- oprot.writeString(self.jsonConf.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.topology is not None:
- oprot.writeFieldBegin('topology', TType.STRUCT, 4)
- self.topology.write(oprot)
- oprot.writeFieldEnd()
- if self.options is not None:
- oprot.writeFieldBegin('options', TType.STRUCT, 5)
- self.options.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class submitTopologyWithOpts_result:
- """
- Attributes:
- - e
- - ite
- - tae
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (AlreadyAliveException, AlreadyAliveException.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
- (3, TType.STRUCT, 'tae', (TopologyAssignException, TopologyAssignException.thrift_spec), None, ), # 3
- )
-
- def __hash__(self):
- return 0 + hash(self.e) + hash(self.ite) + hash(self.tae)
-
- def __init__(self, e=None, ite=None, tae=None,):
- self.e = e
- self.ite = ite
- self.tae = tae
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = AlreadyAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.ite = InvalidTopologyException()
- self.ite.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRUCT:
- self.tae = TopologyAssignException()
- self.tae.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('submitTopologyWithOpts_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- if self.ite is not None:
- oprot.writeFieldBegin('ite', TType.STRUCT, 2)
- self.ite.write(oprot)
- oprot.writeFieldEnd()
- if self.tae is not None:
- oprot.writeFieldBegin('tae', TType.STRUCT, 3)
- self.tae.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class killTopology_args:
- """
- Attributes:
- - name
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.name)
-
- def __init__(self, name=None,):
- self.name = name
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('killTopology_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class killTopology_result:
- """
- Attributes:
- - e
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.e)
-
- def __init__(self, e=None,):
- self.e = e
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = NotAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('killTopology_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class killTopologyWithOpts_args:
- """
- Attributes:
- - name
- - options
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRUCT, 'options', (KillOptions, KillOptions.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.name) + hash(self.options)
-
- def __init__(self, name=None, options=None,):
- self.name = name
- self.options = options
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.options = KillOptions()
- self.options.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('killTopologyWithOpts_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.options is not None:
- oprot.writeFieldBegin('options', TType.STRUCT, 2)
- self.options.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class killTopologyWithOpts_result:
- """
- Attributes:
- - e
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.e)
-
- def __init__(self, e=None,):
- self.e = e
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = NotAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('killTopologyWithOpts_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class activate_args:
- """
- Attributes:
- - name
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.name)
-
- def __init__(self, name=None,):
- self.name = name
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('activate_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class activate_result:
- """
- Attributes:
- - e
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.e)
-
- def __init__(self, e=None,):
- self.e = e
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = NotAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('activate_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class deactivate_args:
- """
- Attributes:
- - name
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.name)
-
- def __init__(self, name=None,):
- self.name = name
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('deactivate_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class deactivate_result:
- """
- Attributes:
- - e
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.e)
-
- def __init__(self, e=None,):
- self.e = e
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = NotAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('deactivate_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class rebalance_args:
- """
- Attributes:
- - name
- - options
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRUCT, 'options', (RebalanceOptions, RebalanceOptions.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.name) + hash(self.options)
-
- def __init__(self, name=None, options=None,):
- self.name = name
- self.options = options
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.options = RebalanceOptions()
- self.options.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('rebalance_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.options is not None:
- oprot.writeFieldBegin('options', TType.STRUCT, 2)
- self.options.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class rebalance_result:
- """
- Attributes:
- - e
- - ite
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'ite', (InvalidTopologyException, InvalidTopologyException.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.e) + hash(self.ite)
-
- def __init__(self, e=None, ite=None,):
- self.e = e
- self.ite = ite
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = NotAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.ite = InvalidTopologyException()
- self.ite.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('rebalance_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- if self.ite is not None:
- oprot.writeFieldBegin('ite', TType.STRUCT, 2)
- self.ite.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class metricMonitor_args:
- """
- Attributes:
- - name
- - options
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'name', None, None, ), # 1
- (2, TType.STRUCT, 'options', (MonitorOptions, MonitorOptions.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.name) + hash(self.options)
-
- def __init__(self, name=None, options=None,):
- self.name = name
- self.options = options
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.options = MonitorOptions()
- self.options.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('metricMonitor_args')
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 1)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.options is not None:
- oprot.writeFieldBegin('options', TType.STRUCT, 2)
- self.options.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class metricMonitor_result:
- """
- Attributes:
- - e
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'e', (NotAliveException, NotAliveException.thrift_spec), None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.e)
-
- def __init__(self, e=None,):
- self.e = e
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.e = NotAliveException()
- self.e.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('metricMonitor_result')
- if self.e is not None:
- oprot.writeFieldBegin('e', TType.STRUCT, 1)
- self.e.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class beginLibUpload_args:
- """
- Attributes:
- - libName
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'libName', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.libName)
-
- def __init__(self, libName=None,):
- self.libName = libName
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.libName = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('beginLibUpload_args')
- if self.libName is not None:
- oprot.writeFieldBegin('libName', TType.STRING, 1)
- oprot.writeString(self.libName.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class beginLibUpload_result:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('beginLibUpload_result')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class beginFileUpload_args:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('beginFileUpload_args')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class beginFileUpload_result:
- """
- Attributes:
- - success
- """
-
- thrift_spec = (
- (0, TType.STRING, 'success', None, None, ), # 0
- )
-
- def __hash__(self):
- return 0 + hash(self.success)
-
- def __init__(self, success=None,):
- self.success = success
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 0:
- if ftype == TType.STRING:
- self.success = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('beginFileUpload_result')
- if self.success is not None:
- oprot.writeFieldBegin('success', TType.STRING, 0)
- oprot.writeString(self.success.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class uploadChunk_args:
- """
- Attributes:
- - location
- - chunk
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'location', None, None, ), # 1
- (2, TType.STRING, 'chunk', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.location) + hash(self.chunk)
-
- def __init__(self, location=None, chunk=None,):
- self.location = location
- self.chunk = chunk
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.location = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.chunk = iprot.readString();
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('uploadChunk_args')
- if self.location is not None:
- oprot.writeFieldBegin('location', TType.STRING, 1)
- oprot.writeString(self.location.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.chunk is not None:
- oprot.writeFieldBegin('chunk', TType.STRING, 2)
- oprot.writeString(self.chunk)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class uploadChunk_result:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('uploadChunk_result')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class finishFileUpload_args:
- """
- Attributes:
- - location
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'location', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.location)
-
- def __init__(self, location=None,):
- self.location = location
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.location = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('finishFileUpload_args')
- if self.location is not None:
- oprot.writeFieldBegin('location', TType.STRING, 1)
- oprot.writeString(self.location.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class finishFileUpload_result:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('finishFileUpload_result')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class beginFileDownload_args:
- """
- Attributes:
- - file
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'file', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.file)
-
- def __init__(self, file=None,):
- self.file = file
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isins
<TRUNCATED>
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/__init__.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/__init__.py b/jstorm-client/src/main/py/storm/__init__.py
deleted file mode 100644
index 10e7d40..0000000
--- a/jstorm-client/src/main/py/storm/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-__all__ = ['ttypes', 'constants', 'Nimbus', 'DistributedRPC', 'DistributedRPCInvocations']
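
Context for the hunks above: they remove the Python Thrift bindings for the Nimbus service (submitTopologyWithOpts, killTopology, killTopologyWithOpts, activate, deactivate, rebalance, metricMonitor, and the file upload/download calls) together with the package's __init__.py. The same RPC surface is still reachable through the Java classes generated under backtype.storm.generated. A minimal sketch follows; the topology name, the 10-second wait, and the use of backtype.storm.utils.NimbusClient / Utils.readStormConfig are illustrative assumptions, not part of this commit.

    import java.util.Map;
    import backtype.storm.generated.KillOptions;
    import backtype.storm.utils.NimbusClient;
    import backtype.storm.utils.Utils;

    public class KillTopologyDemo {
        public static void main(String[] args) throws Exception {
            // Nimbus host/port are read from the local storm.yaml client config.
            Map conf = Utils.readStormConfig();
            NimbusClient nimbus = NimbusClient.getConfiguredClient(conf);
            try {
                // The killTopologyWithOpts_args / _result structs in the hunk above
                // describe the wire format behind this single RPC call.
                KillOptions opts = new KillOptions();
                opts.set_wait_secs(10); // illustrative: let in-flight tuples drain first
                nimbus.getClient().killTopologyWithOpts("word-count", opts);
            } finally {
                nimbus.close();
            }
        }
    }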
[20/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/MapGet.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/MapGet.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/MapGet.java
deleted file mode 100644
index 17e12ee..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/MapGet.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package storm.trident.operation.builtin;
-
-import backtype.storm.tuple.Values;
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-import storm.trident.state.BaseQueryFunction;
-import storm.trident.state.map.ReadOnlyMapState;
-import storm.trident.tuple.TridentTuple;
-
-
-public class MapGet extends BaseQueryFunction<ReadOnlyMapState, Object> {
- @Override
- public List<Object> batchRetrieve(ReadOnlyMapState map, List<TridentTuple> keys) {
- return map.multiGet((List) keys);
- }
-
- @Override
- public void execute(TridentTuple tuple, Object result, TridentCollector collector) {
- collector.emit(new Values(result));
- }
-}
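
MapGet is the query side of the standard Trident word-count pattern: a persistentAggregate builds a keyed count state, and a DRPC stream reads it back one key per request via MapGet.batchRetrieve, which multiGets the incoming tuples as keys. A small sketch under those assumptions; spout data, stream names, and the "count-word" DRPC function are illustrative, and the crude sleep stands in for properly waiting for the local topology to finish its batches.

    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.LocalDRPC;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import backtype.storm.utils.Utils;
    import storm.trident.TridentState;
    import storm.trident.TridentTopology;
    import storm.trident.operation.builtin.Count;
    import storm.trident.operation.builtin.FilterNull;
    import storm.trident.operation.builtin.MapGet;
    import storm.trident.testing.FixedBatchSpout;
    import storm.trident.testing.MemoryMapState;

    public class MapGetDemo {
        public static void main(String[] args) throws Exception {
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("word"), 3,
                    new Values("apple"), new Values("pear"), new Values("apple"));
            spout.setCycle(false);

            TridentTopology topology = new TridentTopology();
            // word -> count state, keyed by the groupBy field.
            TridentState counts = topology.newStream("words", spout)
                    .groupBy(new Fields("word"))
                    .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

            // Each DRPC request carries one word in "args"; MapGet looks it up in the state.
            LocalDRPC drpc = new LocalDRPC();
            topology.newDRPCStream("count-word", drpc)
                    .groupBy(new Fields("args"))
                    .stateQuery(counts, new Fields("args"), new MapGet(), new Fields("count"))
                    .each(new Fields("count"), new FilterNull());

            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("mapget-demo", new Config(), topology.build());
            Utils.sleep(10000); // crude wait so the batches above are processed
            System.out.println(drpc.execute("count-word", "apple")); // prints the count for "apple"
            cluster.shutdown();
            drpc.shutdown();
        }
    }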
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/Negate.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/Negate.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/Negate.java
deleted file mode 100644
index 7a48477..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/Negate.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package storm.trident.operation.builtin;
-
-import java.util.Map;
-import storm.trident.operation.Filter;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.TridentTuple;
-
-public class Negate implements Filter {
-
- Filter _delegate;
-
- public Negate(Filter delegate) {
- _delegate = delegate;
- }
-
- @Override
- public boolean isKeep(TridentTuple tuple) {
- return !_delegate.isKeep(tuple);
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- _delegate.prepare(conf, context);
- }
-
- @Override
- public void cleanup() {
- _delegate.cleanup();
- }
-
-}
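
Negate simply decorates another Filter and inverts its isKeep() answer while delegating prepare/cleanup. A wiring sketch (spout contents and field names are made up for illustration): combined with the builtin FilterNull, it keeps only tuples whose field IS null.

    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentTopology;
    import storm.trident.operation.builtin.FilterNull;
    import storm.trident.operation.builtin.Negate;
    import storm.trident.testing.FixedBatchSpout;

    public class NegateDemo {
        public static void main(String[] args) {
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("word"), 2,
                    new Values("apple"), new Values((Object) null));
            TridentTopology topology = new TridentTopology();
            // FilterNull keeps non-null words; Negate flips that, so only the null tuple survives.
            topology.newStream("words", spout)
                    .each(new Fields("word"), new Negate(new FilterNull()));
            topology.build(); // assembles the StormTopology; not submitted in this sketch
        }
    }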
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/SnapshotGet.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/SnapshotGet.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/SnapshotGet.java
deleted file mode 100644
index fbc3286..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/SnapshotGet.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package storm.trident.operation.builtin;
-
-import backtype.storm.tuple.Values;
-import java.util.ArrayList;
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-import storm.trident.state.BaseQueryFunction;
-import storm.trident.state.snapshot.ReadOnlySnapshottable;
-import storm.trident.tuple.TridentTuple;
-
-public class SnapshotGet extends BaseQueryFunction<ReadOnlySnapshottable, Object> {
-
- @Override
- public List<Object> batchRetrieve(ReadOnlySnapshottable state, List<TridentTuple> args) {
- List<Object> ret = new ArrayList<Object>(args.size());
- Object snapshot = state.get();
- for(int i=0; i<args.size(); i++) {
- ret.add(snapshot);
- }
- return ret;
- }
-
- @Override
- public void execute(TridentTuple tuple, Object result, TridentCollector collector) {
- collector.emit(new Values(result));
- }
-}
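
SnapshotGet emits the single snapshotted value of a state once per input tuple, which is the usual way to read back a global (non-grouped) persistentAggregate. A wiring sketch under that assumption; stream names, the "global-sum" DRPC function, and field names are illustrative.

    import backtype.storm.LocalDRPC;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentState;
    import storm.trident.TridentTopology;
    import storm.trident.operation.builtin.SnapshotGet;
    import storm.trident.operation.builtin.Sum;
    import storm.trident.testing.FixedBatchSpout;
    import storm.trident.testing.MemoryMapState;

    public class SnapshotGetDemo {
        public static void main(String[] args) {
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("amount"), 3,
                    new Values(10), new Values(5), new Values(7));
            TridentTopology topology = new TridentTopology();
            // No groupBy: the state holds one global running sum.
            TridentState total = topology.newStream("amounts", spout)
                    .persistentAggregate(new MemoryMapState.Factory(),
                            new Fields("amount"), new Sum(), new Fields("sum"));
            // Every DRPC request gets the current snapshot emitted as "sum".
            LocalDRPC drpc = new LocalDRPC();
            topology.newDRPCStream("global-sum", drpc)
                    .stateQuery(total, new SnapshotGet(), new Fields("sum"));
            topology.build(); // wiring only; not submitted in this sketch
        }
    }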
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/Sum.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/Sum.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/Sum.java
deleted file mode 100644
index d67ac66..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/Sum.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package storm.trident.operation.builtin;
-
-import clojure.lang.Numbers;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.tuple.TridentTuple;
-
-
-public class Sum implements CombinerAggregator<Number> {
-
- @Override
- public Number init(TridentTuple tuple) {
- return (Number) tuple.getValue(0);
- }
-
- @Override
- public Number combine(Number val1, Number val2) {
- return Numbers.add(val1, val2);
- }
-
- @Override
- public Number zero() {
- return 0;
- }
-
-}
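
Sum is the simplest CombinerAggregator: init() pulls the first value of each tuple, combine() adds values pairwise, and zero() seeds empty partitions. A per-batch aggregation sketch (spout values and field names are illustrative):

    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentTopology;
    import storm.trident.operation.builtin.Sum;
    import storm.trident.testing.FixedBatchSpout;

    public class SumDemo {
        public static void main(String[] args) {
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("amount"), 3,
                    new Values(10), new Values(5), new Values(7));
            TridentTopology topology = new TridentTopology();
            // Emits one ("total") tuple per batch: 10 + 5 + 7 = 22 for the batch above.
            topology.newStream("amounts", spout)
                    .aggregate(new Fields("amount"), new Sum(), new Fields("total"));
            topology.build(); // wiring only; not submitted in this sketch
        }
    }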
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/TupleCollectionGet.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/TupleCollectionGet.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/TupleCollectionGet.java
deleted file mode 100644
index 6302e02..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/TupleCollectionGet.java
+++ /dev/null
@@ -1,29 +0,0 @@
-package storm.trident.operation.builtin;
-
-import storm.trident.state.ITupleCollection;
-import java.util.ArrayList;
-import java.util.Iterator;
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-import storm.trident.state.BaseQueryFunction;
-import storm.trident.state.State;
-import storm.trident.tuple.TridentTuple;
-
-public class TupleCollectionGet extends BaseQueryFunction<State, Iterator<List<Object>>> {
-
- @Override
- public List<Iterator<List<Object>>> batchRetrieve(State state, List<TridentTuple> args) {
- List<Iterator<List<Object>>> ret = new ArrayList(args.size());
- for(int i=0; i<args.size(); i++) {
- ret.add(((ITupleCollection)state).getTuples());
- }
- return ret;
- }
-
- @Override
- public void execute(TridentTuple tuple, Iterator<List<Object>> tuplesIterator, TridentCollector collector) {
- while(tuplesIterator.hasNext()) {
- collector.emit(tuplesIterator.next());
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/CaptureCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/CaptureCollector.java b/jstorm-client/src/main/java/storm/trident/operation/impl/CaptureCollector.java
deleted file mode 100644
index 9fe4419..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/CaptureCollector.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package storm.trident.operation.impl;
-
-import java.util.ArrayList;
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-
-public class CaptureCollector implements TridentCollector {
- public List<List<Object>> captured = new ArrayList();
-
- TridentCollector _coll;
-
- public void setCollector(TridentCollector coll) {
- _coll = coll;
- }
-
- @Override
- public void emit(List<Object> values) {
- this.captured.add(values);
- }
-
- @Override
- public void reportError(Throwable t) {
- _coll.reportError(t);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java b/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java
deleted file mode 100644
index f8bd001..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedAggregatorImpl.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.ComboList;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-public class ChainedAggregatorImpl implements Aggregator<ChainedResult> {
- Aggregator[] _aggs;
- ProjectionFactory[] _inputFactories;
- ComboList.Factory _fact;
- Fields[] _inputFields;
-
-
-
- public ChainedAggregatorImpl(Aggregator[] aggs, Fields[] inputFields, ComboList.Factory fact) {
- _aggs = aggs;
- _inputFields = inputFields;
- _fact = fact;
- if(_aggs.length!=_inputFields.length) {
- throw new IllegalArgumentException("Require input fields for each aggregator");
- }
- }
-
- public void prepare(Map conf, TridentOperationContext context) {
- _inputFactories = new ProjectionFactory[_inputFields.length];
- for(int i=0; i<_inputFields.length; i++) {
- _inputFactories[i] = context.makeProjectionFactory(_inputFields[i]);
- _aggs[i].prepare(conf, new TridentOperationContext(context, _inputFactories[i]));
- }
- }
-
- public ChainedResult init(Object batchId, TridentCollector collector) {
- ChainedResult initted = new ChainedResult(collector, _aggs.length);
- for(int i=0; i<_aggs.length; i++) {
- initted.objs[i] = _aggs[i].init(batchId, initted.collectors[i]);
- }
- return initted;
- }
-
- public void aggregate(ChainedResult val, TridentTuple tuple, TridentCollector collector) {
- val.setFollowThroughCollector(collector);
- for(int i=0; i<_aggs.length; i++) {
- TridentTuple projected = _inputFactories[i].create((TridentTupleView) tuple);
- _aggs[i].aggregate(val.objs[i], projected, val.collectors[i]);
- }
- }
-
- public void complete(ChainedResult val, TridentCollector collector) {
- val.setFollowThroughCollector(collector);
- for(int i=0; i<_aggs.length; i++) {
- _aggs[i].complete(val.objs[i], val.collectors[i]);
- }
- if(_aggs.length > 1) { // otherwise, tuples were emitted directly
- int[] indices = new int[val.collectors.length];
- for(int i=0; i<indices.length; i++) {
- indices[i] = 0;
- }
- boolean keepGoing = true;
- //emit cross-join of all emitted tuples
- while(keepGoing) {
- List[] combined = new List[_aggs.length];
- for(int i=0; i< _aggs.length; i++) {
- CaptureCollector capturer = (CaptureCollector) val.collectors[i];
- combined[i] = capturer.captured.get(indices[i]);
- }
- collector.emit(_fact.create(combined));
- keepGoing = increment(val.collectors, indices, indices.length - 1);
- }
- }
- }
-
- //return false if can't increment anymore
- private boolean increment(TridentCollector[] lengths, int[] indices, int j) {
- if(j==-1) return false;
- indices[j]++;
- CaptureCollector capturer = (CaptureCollector) lengths[j];
- if(indices[j] >= capturer.captured.size()) {
- indices[j] = 0;
- return increment(lengths, indices, j-1);
- }
- return true;
- }
-
- public void cleanup() {
- for(Aggregator a: _aggs) {
- a.cleanup();
- }
- }
-}
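
ChainedAggregatorImpl is what backs Stream.chainedAgg(): each chained aggregator sees the same batch through its own projected input fields and its own CaptureCollector, and complete() cross-joins whatever the aggregators emitted into combined output tuples. A usage sketch (spout data and field names are illustrative):

    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentTopology;
    import storm.trident.operation.builtin.Count;
    import storm.trident.operation.builtin.Sum;
    import storm.trident.testing.FixedBatchSpout;

    public class ChainedAggDemo {
        public static void main(String[] args) {
            FixedBatchSpout spout = new FixedBatchSpout(new Fields("amount"), 3,
                    new Values(10), new Values(5), new Values(7));
            TridentTopology topology = new TridentTopology();
            // One pass over the batch, two aggregations, one ("total", "n") output tuple.
            topology.newStream("amounts", spout)
                    .chainedAgg()
                    .aggregate(new Fields("amount"), new Sum(), new Fields("total"))
                    .aggregate(new Fields("amount"), new Count(), new Fields("n"))
                    .chainEnd();
            topology.build(); // wiring only; not submitted in this sketch
        }
    }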
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedResult.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedResult.java b/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedResult.java
deleted file mode 100644
index a35df3a..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/ChainedResult.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package storm.trident.operation.impl;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import storm.trident.operation.TridentCollector;
-
-
-//for ChainedAggregator
-public class ChainedResult {
- Object[] objs;
- TridentCollector[] collectors;
-
- public ChainedResult(TridentCollector collector, int size) {
- objs = new Object[size];
- collectors = new TridentCollector[size];
- for(int i=0; i<size; i++) {
- if(size==1) {
- collectors[i] = collector;
- } else {
- collectors[i] = new CaptureCollector();
- }
- }
- }
-
- public void setFollowThroughCollector(TridentCollector collector) {
- if(collectors.length>1) {
- for(TridentCollector c: collectors) {
- ((CaptureCollector) c).setCollector(collector);
- }
- }
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(objs);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggStateUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggStateUpdater.java b/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggStateUpdater.java
deleted file mode 100644
index 97a9b9d..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggStateUpdater.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Values;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.state.CombinerValueUpdater;
-import storm.trident.state.StateUpdater;
-import storm.trident.state.snapshot.Snapshottable;
-import storm.trident.tuple.TridentTuple;
-
-public class CombinerAggStateUpdater implements StateUpdater<Snapshottable> {
- CombinerAggregator _agg;
-
- public CombinerAggStateUpdater(CombinerAggregator agg) {
- _agg = agg;
- }
-
-
- @Override
- public void updateState(Snapshottable state, List<TridentTuple> tuples, TridentCollector collector) {
- if(tuples.size()!=1) {
- throw new IllegalArgumentException("Combiner state updater should receive a single tuple. Received: " + tuples.toString());
- }
- Object newVal = state.update(new CombinerValueUpdater(_agg, tuples.get(0).getValue(0)));
- collector.emit(new Values(newVal));
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorCombineImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorCombineImpl.java b/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorCombineImpl.java
deleted file mode 100644
index d9d00e5..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorCombineImpl.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Values;
-import java.util.Map;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.TridentTuple;
-
-public class CombinerAggregatorCombineImpl implements Aggregator<Result> {
- CombinerAggregator _agg;
-
- public CombinerAggregatorCombineImpl(CombinerAggregator agg) {
- _agg = agg;
- }
-
- public void prepare(Map conf, TridentOperationContext context) {
-
- }
-
- public Result init(Object batchId, TridentCollector collector) {
- Result ret = new Result();
- ret.obj = _agg.zero();
- return ret;
- }
-
- public void aggregate(Result val, TridentTuple tuple, TridentCollector collector) {
- Object v = tuple.getValue(0);
- if(val.obj==null) {
- val.obj = v;
- } else {
- val.obj = _agg.combine(val.obj, v);
- }
- }
-
- public void complete(Result val, TridentCollector collector) {
- collector.emit(new Values(val.obj));
- }
-
- public void cleanup() {
-
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorInitImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorInitImpl.java b/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorInitImpl.java
deleted file mode 100644
index 9020094..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/CombinerAggregatorInitImpl.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Values;
-import java.util.Map;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.Function;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.TridentTuple;
-
-public class CombinerAggregatorInitImpl implements Function {
-
- CombinerAggregator _agg;
-
- public CombinerAggregatorInitImpl(CombinerAggregator agg) {
- _agg = agg;
- }
-
- @Override
- public void execute(TridentTuple tuple, TridentCollector collector) {
- collector.emit(new Values(_agg.init(tuple)));
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/FilterExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/FilterExecutor.java b/jstorm-client/src/main/java/storm/trident/operation/impl/FilterExecutor.java
deleted file mode 100644
index 2b96834..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/FilterExecutor.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package storm.trident.operation.impl;
-
-import java.util.Map;
-import storm.trident.operation.Filter;
-import storm.trident.operation.Function;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.TridentTuple;
-
-// works by emitting null to the collector. since the planner knows this is an ADD node with
-// no new output fields, it just passes the tuple forward
-public class FilterExecutor implements Function {
- Filter _filter;
-
- public FilterExecutor(Filter filter) {
- _filter = filter;
- }
-
- @Override
- public void execute(TridentTuple tuple, TridentCollector collector) {
- if(_filter.isKeep(tuple)) {
- collector.emit(null);
- }
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- _filter.prepare(conf, context);
- }
-
- @Override
- public void cleanup() {
- _filter.cleanup();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/GlobalBatchToPartition.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/GlobalBatchToPartition.java b/jstorm-client/src/main/java/storm/trident/operation/impl/GlobalBatchToPartition.java
deleted file mode 100644
index 3bf52b3..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/GlobalBatchToPartition.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package storm.trident.operation.impl;
-
-
-public class GlobalBatchToPartition implements SingleEmitAggregator.BatchToPartition {
-
- @Override
- public int partitionIndex(Object batchId, int numPartitions) {
- // TODO: take away knowledge of storm's internals here
- return 0;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/GroupCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/GroupCollector.java b/jstorm-client/src/main/java/storm/trident/operation/impl/GroupCollector.java
deleted file mode 100644
index b997217..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/GroupCollector.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package storm.trident.operation.impl;
-
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.ComboList;
-
-public class GroupCollector implements TridentCollector {
- public List<Object> currGroup;
-
- ComboList.Factory _factory;
- TridentCollector _collector;
-
- public GroupCollector(TridentCollector collector, ComboList.Factory factory) {
- _factory = factory;
- _collector = collector;
- }
-
- @Override
- public void emit(List<Object> values) {
- List[] delegates = new List[2];
- delegates[0] = currGroup;
- delegates[1] = values;
- _collector.emit(_factory.create(delegates));
- }
-
- @Override
- public void reportError(Throwable t) {
- _collector.reportError(t);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedAggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedAggregator.java b/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedAggregator.java
deleted file mode 100644
index d78de70..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedAggregator.java
+++ /dev/null
@@ -1,79 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Fields;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.ComboList;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-public class GroupedAggregator implements Aggregator<Object[]> {
- ProjectionFactory _groupFactory;
- ProjectionFactory _inputFactory;
- Aggregator _agg;
- ComboList.Factory _fact;
- Fields _inFields;
- Fields _groupFields;
-
- public GroupedAggregator(Aggregator agg, Fields group, Fields input, int outSize) {
- _groupFields = group;
- _inFields = input;
- _agg = agg;
- int[] sizes = new int[2];
- sizes[0] = _groupFields.size();
- sizes[1] = outSize;
- _fact = new ComboList.Factory(sizes);
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- _inputFactory = context.makeProjectionFactory(_inFields);
- _groupFactory = context.makeProjectionFactory(_groupFields);
- _agg.prepare(conf, new TridentOperationContext(context, _inputFactory));
- }
-
- @Override
- public Object[] init(Object batchId, TridentCollector collector) {
- return new Object[] {new GroupCollector(collector, _fact), new HashMap(), batchId};
- }
-
- @Override
- public void aggregate(Object[] arr, TridentTuple tuple, TridentCollector collector) {
- GroupCollector groupColl = (GroupCollector) arr[0];
- Map<List, Object> val = (Map) arr[1];
- TridentTuple group = _groupFactory.create((TridentTupleView) tuple);
- TridentTuple input = _inputFactory.create((TridentTupleView) tuple);
- Object curr;
- if(!val.containsKey(group)) {
- curr = _agg.init(arr[2], groupColl);
- val.put((List) group, curr);
- } else {
- curr = val.get(group);
- }
- groupColl.currGroup = group;
- _agg.aggregate(curr, input, groupColl);
-
- }
-
- @Override
- public void complete(Object[] arr, TridentCollector collector) {
- Map<List, Object> val = (Map) arr[1];
- GroupCollector groupColl = (GroupCollector) arr[0];
- for(Entry<List, Object> e: val.entrySet()) {
- groupColl.currGroup = e.getKey();
- _agg.complete(e.getValue(), groupColl);
- }
- }
-
- @Override
- public void cleanup() {
- _agg.cleanup();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedMultiReducerExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedMultiReducerExecutor.java b/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedMultiReducerExecutor.java
deleted file mode 100644
index 2615962..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/GroupedMultiReducerExecutor.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.GroupedMultiReducer;
-import storm.trident.operation.MultiReducer;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentMultiReducerContext;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class GroupedMultiReducerExecutor implements MultiReducer<Map<TridentTuple, Object>> {
- GroupedMultiReducer _reducer;
- List<Fields> _groupFields;
- List<Fields> _inputFields;
- List<ProjectionFactory> _groupFactories = new ArrayList<ProjectionFactory>();
- List<ProjectionFactory> _inputFactories = new ArrayList<ProjectionFactory>();
-
- public GroupedMultiReducerExecutor(GroupedMultiReducer reducer, List<Fields> groupFields, List<Fields> inputFields) {
- if(inputFields.size()!=groupFields.size()) {
- throw new IllegalArgumentException("Multireducer groupFields and inputFields must be the same size");
- }
- _groupFields = groupFields;
- _inputFields = inputFields;
- _reducer = reducer;
- }
-
- @Override
- public void prepare(Map conf, TridentMultiReducerContext context) {
- for(int i=0; i<_groupFields.size(); i++) {
- _groupFactories.add(context.makeProjectionFactory(i, _groupFields.get(i)));
- _inputFactories.add(context.makeProjectionFactory(i, _inputFields.get(i)));
- }
- _reducer.prepare(conf, new TridentMultiReducerContext((List) _inputFactories));
- }
-
- @Override
- public Map<TridentTuple, Object> init(TridentCollector collector) {
- return new HashMap();
- }
-
- @Override
- public void execute(Map<TridentTuple, Object> state, int streamIndex, TridentTuple full, TridentCollector collector) {
- ProjectionFactory groupFactory = _groupFactories.get(streamIndex);
- ProjectionFactory inputFactory = _inputFactories.get(streamIndex);
-
- TridentTuple group = groupFactory.create(full);
- TridentTuple input = inputFactory.create(full);
-
- Object curr;
- if(!state.containsKey(group)) {
- curr = _reducer.init(collector, group);
- state.put(group, curr);
- } else {
- curr = state.get(group);
- }
- _reducer.execute(curr, streamIndex, group, input, collector);
- }
-
- @Override
- public void complete(Map<TridentTuple, Object> state, TridentCollector collector) {
- for(Map.Entry e: state.entrySet()) {
- TridentTuple group = (TridentTuple) e.getKey();
- Object val = e.getValue();
- _reducer.complete(val, group, collector);
- }
- }
-
- @Override
- public void cleanup() {
- _reducer.cleanup();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/IdentityMultiReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/IdentityMultiReducer.java b/jstorm-client/src/main/java/storm/trident/operation/impl/IdentityMultiReducer.java
deleted file mode 100644
index f482ec4..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/IdentityMultiReducer.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package storm.trident.operation.impl;
-
-import java.util.Map;
-import storm.trident.operation.MultiReducer;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentMultiReducerContext;
-import storm.trident.tuple.TridentTuple;
-
-
-public class IdentityMultiReducer implements MultiReducer {
-
- @Override
- public void prepare(Map conf, TridentMultiReducerContext context) {
- }
-
- @Override
- public Object init(TridentCollector collector) {
- return null;
- }
-
- @Override
- public void execute(Object state, int streamIndex, TridentTuple input, TridentCollector collector) {
- collector.emit(input);
- }
-
- @Override
- public void complete(Object state, TridentCollector collector) {
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/IndexHashBatchToPartition.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/IndexHashBatchToPartition.java b/jstorm-client/src/main/java/storm/trident/operation/impl/IndexHashBatchToPartition.java
deleted file mode 100644
index 779c4b8..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/IndexHashBatchToPartition.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package storm.trident.operation.impl;
-
-import storm.trident.partition.IndexHashGrouping;
-
-public class IndexHashBatchToPartition implements SingleEmitAggregator.BatchToPartition {
-
- @Override
- public int partitionIndex(Object batchId, int numPartitions) {
- return IndexHashGrouping.objectToIndex(batchId, numPartitions);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/JoinerMultiReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/JoinerMultiReducer.java b/jstorm-client/src/main/java/storm/trident/operation/impl/JoinerMultiReducer.java
deleted file mode 100644
index 963751e..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/JoinerMultiReducer.java
+++ /dev/null
@@ -1,142 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import storm.trident.JoinType;
-import storm.trident.operation.GroupedMultiReducer;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentMultiReducerContext;
-import storm.trident.operation.impl.JoinerMultiReducer.JoinState;
-import storm.trident.tuple.ComboList;
-import storm.trident.tuple.TridentTuple;
-
-public class JoinerMultiReducer implements GroupedMultiReducer<JoinState> {
-
- List<JoinType> _types;
- List<Fields> _sideFields;
- int _numGroupFields;
- ComboList.Factory _factory;
-
-
- public JoinerMultiReducer(List<JoinType> types, int numGroupFields, List<Fields> sides) {
- _types = types;
- _sideFields = sides;
- _numGroupFields = numGroupFields;
- }
-
- @Override
- public void prepare(Map conf, TridentMultiReducerContext context) {
- int[] sizes = new int[_sideFields.size() + 1];
- sizes[0] = _numGroupFields;
- for(int i=0; i<_sideFields.size(); i++) {
- sizes[i+1] = _sideFields.get(i).size();
- }
- _factory = new ComboList.Factory(sizes);
- }
-
- @Override
- public JoinState init(TridentCollector collector, TridentTuple group) {
- return new JoinState(_types.size(), group);
- }
-
- @Override
- public void execute(JoinState state, int streamIndex, TridentTuple group, TridentTuple input, TridentCollector collector) {
- //TODO: do the inner join incrementally, emitting the cross join with this tuple, against all other sides
- //TODO: only do cross join if at least one tuple in each side
- List<List> side = state.sides[streamIndex];
- if(side.isEmpty()) {
- state.numSidesReceived++;
- }
-
- side.add(input);
- if(state.numSidesReceived == state.sides.length) {
- emitCrossJoin(state, collector, streamIndex, input);
- }
- }
-
- @Override
- public void complete(JoinState state, TridentTuple group, TridentCollector collector) {
- List<List>[] sides = state.sides;
- boolean wasEmpty = state.numSidesReceived < sides.length;
- for(int i=0; i<sides.length; i++) {
- if(sides[i].isEmpty() && _types.get(i) == JoinType.OUTER) {
- state.numSidesReceived++;
- sides[i].add(makeNullList(_sideFields.get(i).size()));
- }
- }
- if(wasEmpty && state.numSidesReceived == sides.length) {
- emitCrossJoin(state, collector, -1, null);
- }
- }
-
- @Override
- public void cleanup() {
- }
-
- private List<Object> makeNullList(int size) {
- List<Object> ret = new ArrayList(size);
- for(int i=0; i<size; i++) {
- ret.add(null);
- }
- return ret;
- }
-
- private void emitCrossJoin(JoinState state, TridentCollector collector, int overrideIndex, TridentTuple overrideTuple) {
- List<List>[] sides = state.sides;
- int[] indices = state.indices;
- for(int i=0; i<indices.length; i++) {
- indices[i] = 0;
- }
-
- boolean keepGoing = true;
- //emit cross-join of all emitted tuples
- while(keepGoing) {
- List[] combined = new List[sides.length+1];
- combined[0] = state.group;
- for(int i=0; i<sides.length; i++) {
- if(i==overrideIndex) {
- combined[i+1] = overrideTuple;
- } else {
- combined[i+1] = sides[i].get(indices[i]);
- }
- }
- collector.emit(_factory.create(combined));
- keepGoing = increment(sides, indices, indices.length - 1, overrideIndex);
- }
- }
-
-
- //return false if can't increment anymore
- //TODO: DRY this code up with what's in ChainedAggregatorImpl
- private boolean increment(List[] lengths, int[] indices, int j, int overrideIndex) {
- if(j==-1) return false;
- if(j==overrideIndex) {
- return increment(lengths, indices, j-1, overrideIndex);
- }
- indices[j]++;
- if(indices[j] >= lengths[j].size()) {
- indices[j] = 0;
- return increment(lengths, indices, j-1, overrideIndex);
- }
- return true;
- }
-
- public static class JoinState {
- List<List>[] sides;
- int numSidesReceived = 0;
- int[] indices;
- TridentTuple group;
-
- public JoinState(int numSides, TridentTuple group) {
- sides = new List[numSides];
- indices = new int[numSides];
- this.group = group;
- for(int i=0; i<numSides; i++) {
- sides[i] = new ArrayList<List>();
- }
- }
- }
-
-}
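The private increment helper above advances the per-side indices like an odometer, so emitCrossJoin visits every combination of buffered tuples exactly once. A standalone sketch of the same idea (plain Java, purely illustrative; it omits the overrideIndex wrinkle):

import java.util.Arrays;
import java.util.List;

public class OdometerDemo {
    // Returns false once every combination of indices has been visited.
    static boolean increment(List<?>[] lists, int[] indices, int j) {
        if (j == -1) return false;
        indices[j]++;
        if (indices[j] >= lists[j].size()) {
            indices[j] = 0;
            return increment(lists, indices, j - 1);
        }
        return true;
    }

    public static void main(String[] args) {
        List<?>[] sides = { Arrays.asList("a", "b"), Arrays.asList(1, 2, 3) };
        int[] idx = new int[sides.length];
        do {
            System.out.println(sides[0].get(idx[0]) + "," + sides[1].get(idx[1]));
        } while (increment(sides, idx, idx.length - 1));
        // Prints the full cross product: a,1 a,2 a,3 b,1 b,2 b,3
    }
}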
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggStateUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggStateUpdater.java b/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggStateUpdater.java
deleted file mode 100644
index 647d30f..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggStateUpdater.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Values;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.state.ReducerValueUpdater;
-import storm.trident.state.StateUpdater;
-import storm.trident.state.snapshot.Snapshottable;
-import storm.trident.tuple.TridentTuple;
-
-public class ReducerAggStateUpdater implements StateUpdater<Snapshottable> {
- ReducerAggregator _agg;
-
- public ReducerAggStateUpdater(ReducerAggregator agg) {
- _agg = agg;
- }
-
-
- @Override
- public void updateState(Snapshottable state, List<TridentTuple> tuples, TridentCollector collector) {
- Object newVal = state.update(new ReducerValueUpdater(_agg, tuples));
- collector.emit(new Values(newVal));
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggregatorImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggregatorImpl.java b/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggregatorImpl.java
deleted file mode 100644
index c047762..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/ReducerAggregatorImpl.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package storm.trident.operation.impl;
-
-import backtype.storm.tuple.Values;
-import java.util.Map;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.TridentTuple;
-
-public class ReducerAggregatorImpl implements Aggregator<Result> {
- ReducerAggregator _agg;
-
- public ReducerAggregatorImpl(ReducerAggregator agg) {
- _agg = agg;
- }
-
- public void prepare(Map conf, TridentOperationContext context) {
-
- }
-
- public Result init(Object batchId, TridentCollector collector) {
- Result ret = new Result();
- ret.obj = _agg.init();
- return ret;
- }
-
- public void aggregate(Result val, TridentTuple tuple, TridentCollector collector) {
- val.obj = _agg.reduce(val.obj, tuple);
- }
-
- public void complete(Result val, TridentCollector collector) {
- collector.emit(new Values(val.obj));
- }
-
- public void cleanup() {
-
- }
-}
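As the calls above show, the wrapped ReducerAggregator only needs init() to produce a starting value and reduce() to fold in each tuple. A minimal count-style implementation (illustrative only):

import storm.trident.operation.ReducerAggregator;
import storm.trident.tuple.TridentTuple;

public class CountReducer implements ReducerAggregator<Long> {
    @Override
    public Long init() {
        return 0L;           // starting value for each batch
    }

    @Override
    public Long reduce(Long curr, TridentTuple tuple) {
        return curr + 1;     // one unit per tuple
    }
}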
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/Result.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/Result.java b/jstorm-client/src/main/java/storm/trident/operation/impl/Result.java
deleted file mode 100644
index 3748a7a..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/Result.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package storm.trident.operation.impl;
-
-public class Result {
- public Object obj;
-
- @Override
- public String toString() {
- return "" + obj;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/SingleEmitAggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/SingleEmitAggregator.java b/jstorm-client/src/main/java/storm/trident/operation/impl/SingleEmitAggregator.java
deleted file mode 100644
index 4be7c45..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/SingleEmitAggregator.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package storm.trident.operation.impl;
-
-import java.io.Serializable;
-import java.util.Map;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.operation.impl.SingleEmitAggregator.SingleEmitState;
-import storm.trident.tuple.TridentTuple;
-
-
-public class SingleEmitAggregator implements Aggregator<SingleEmitState> {
- public static interface BatchToPartition extends Serializable {
- int partitionIndex(Object batchId, int numPartitions);
- }
-
- static class SingleEmitState {
- boolean received = false;
- Object state;
- Object batchId;
-
- public SingleEmitState(Object batchId) {
- this.batchId = batchId;
- }
- }
-
- Aggregator _agg;
- BatchToPartition _batchToPartition;
-
- public SingleEmitAggregator(Aggregator agg, BatchToPartition batchToPartition) {
- _agg = agg;
- _batchToPartition = batchToPartition;
- }
-
-
- @Override
- public SingleEmitState init(Object batchId, TridentCollector collector) {
- return new SingleEmitState(batchId);
- }
-
- @Override
- public void aggregate(SingleEmitState val, TridentTuple tuple, TridentCollector collector) {
- if(!val.received) {
- val.state = _agg.init(val.batchId, collector);
- val.received = true;
- }
- _agg.aggregate(val.state, tuple, collector);
- }
-
- @Override
- public void complete(SingleEmitState val, TridentCollector collector) {
- if(!val.received) {
- if(this.myPartitionIndex == _batchToPartition.partitionIndex(val.batchId, this.totalPartitions)) {
- val.state = _agg.init(val.batchId, collector);
- _agg.complete(val.state, collector);
- }
- } else {
- _agg.complete(val.state, collector);
- }
- }
-
- int myPartitionIndex;
- int totalPartitions;
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- _agg.prepare(conf, context);
- this.myPartitionIndex = context.getPartitionIndex();
- this.totalPartitions = context.numPartitions();
- }
-
- @Override
- public void cleanup() {
- _agg.cleanup();
- }
-
-
-}
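One detail worth noting in complete() above: if this partition received no tuples for the batch, it still emits an aggregate, but only when it is the single partition selected by the BatchToPartition strategy, so downstream sees exactly one result per batch. The two strategies removed in this commit reduce to the following (sketch, not part of the commit):

import storm.trident.partition.IndexHashGrouping;

public class EmptyBatchOwnerDemo {
    // Which partition owns the empty-batch emit under each strategy.
    static int ownerPartition(Object batchId, int numPartitions, boolean globalAggregation) {
        if (globalAggregation) {
            return 0; // GlobalBatchToPartition: always partition 0
        }
        // IndexHashBatchToPartition: stable hash of the batch id
        return IndexHashGrouping.objectToIndex(batchId, numPartitions);
    }

    public static void main(String[] args) {
        System.out.println(ownerPartition("batch-7", 4, true));  // 0
        System.out.println(ownerPartition(null, 4, false));      // 0 (null always maps to 0)
    }
}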
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/impl/TrueFilter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/impl/TrueFilter.java b/jstorm-client/src/main/java/storm/trident/operation/impl/TrueFilter.java
deleted file mode 100644
index 6e9d15c..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/impl/TrueFilter.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package storm.trident.operation.impl;
-
-import java.util.Map;
-import storm.trident.operation.Filter;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.tuple.TridentTuple;
-
-public class TrueFilter implements Filter {
-
- @Override
- public boolean isKeep(TridentTuple tuple) {
- return true;
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/partition/GlobalGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/partition/GlobalGrouping.java b/jstorm-client/src/main/java/storm/trident/partition/GlobalGrouping.java
deleted file mode 100644
index 0270bf4..0000000
--- a/jstorm-client/src/main/java/storm/trident/partition/GlobalGrouping.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package storm.trident.partition;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.task.WorkerTopologyContext;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-public class GlobalGrouping implements CustomStreamGrouping {
-
- List<Integer> target;
-
-
- @Override
- public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targets) {
- List<Integer> sorted = new ArrayList<Integer>(targets);
- Collections.sort(sorted);
- target = Arrays.asList(sorted.get(0));
- }
-
- @Override
- public List<Integer> chooseTasks(int i, List<Object> list) {
- return target;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/partition/IdentityGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/partition/IdentityGrouping.java b/jstorm-client/src/main/java/storm/trident/partition/IdentityGrouping.java
deleted file mode 100644
index ccb9d6e..0000000
--- a/jstorm-client/src/main/java/storm/trident/partition/IdentityGrouping.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package storm.trident.partition;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.task.WorkerTopologyContext;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-
-public class IdentityGrouping implements CustomStreamGrouping {
-
- List<Integer> ret = new ArrayList<Integer>();
- Map<Integer, List<Integer>> _precomputed = new HashMap();
-
- @Override
- public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> tasks) {
- List<Integer> sourceTasks = new ArrayList<Integer>(context.getComponentTasks(stream.get_componentId()));
- Collections.sort(sourceTasks);
- if(sourceTasks.size()!=tasks.size()) {
- throw new RuntimeException("Can only do an identity grouping when source and target have same number of tasks");
- }
- tasks = new ArrayList<Integer>(tasks);
- Collections.sort(tasks);
- for(int i=0; i<sourceTasks.size(); i++) {
- int s = sourceTasks.get(i);
- int t = tasks.get(i);
- _precomputed.put(s, Arrays.asList(t));
- }
- }
-
- @Override
- public List<Integer> chooseTasks(int task, List<Object> values) {
- List<Integer> ret = _precomputed.get(task);
- if(ret==null) {
- throw new RuntimeException("Tuple emitted by task that's not part of this component. Should be impossible");
- }
- return ret;
- }
-
-}
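IdentityGrouping pairs source and target tasks positionally after sorting both lists, so the i-th source task always routes to the i-th target task. A small worked example of the mapping its prepare() precomputes (task ids are made up):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class IdentityGroupingDemo {
    public static void main(String[] args) {
        List<Integer> sourceTasks = new ArrayList<Integer>(Arrays.asList(7, 3, 5));
        List<Integer> targetTasks = new ArrayList<Integer>(Arrays.asList(12, 10, 11));
        Collections.sort(sourceTasks); // [3, 5, 7]
        Collections.sort(targetTasks); // [10, 11, 12]
        Map<Integer, List<Integer>> precomputed = new HashMap<Integer, List<Integer>>();
        for (int i = 0; i < sourceTasks.size(); i++) {
            precomputed.put(sourceTasks.get(i), Arrays.asList(targetTasks.get(i)));
        }
        System.out.println(precomputed.get(5)); // [11]
    }
}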
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/partition/IndexHashGrouping.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/partition/IndexHashGrouping.java b/jstorm-client/src/main/java/storm/trident/partition/IndexHashGrouping.java
deleted file mode 100644
index 69c36ac..0000000
--- a/jstorm-client/src/main/java/storm/trident/partition/IndexHashGrouping.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package storm.trident.partition;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.task.WorkerTopologyContext;
-import java.util.Arrays;
-import java.util.List;
-
-public class IndexHashGrouping implements CustomStreamGrouping {
- public static int objectToIndex(Object val, int numPartitions) {
- if(val==null) return 0;
- else {
- return Math.abs(val.hashCode() % numPartitions);
- }
- }
-
- int _index;
- List<Integer> _targets;
-
- public IndexHashGrouping(int index) {
- _index = index;
- }
-
-
- @Override
- public void prepare(WorkerTopologyContext context, GlobalStreamId stream, List<Integer> targetTasks) {
- _targets = targetTasks;
- }
-
- @Override
- public List<Integer> chooseTasks(int fromTask, List<Object> values) {
- int i = objectToIndex(values.get(_index), _targets.size());
- return Arrays.asList(_targets.get(i));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/BridgeReceiver.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/BridgeReceiver.java b/jstorm-client/src/main/java/storm/trident/planner/BridgeReceiver.java
deleted file mode 100644
index b596d54..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/BridgeReceiver.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import storm.trident.tuple.ConsList;
-import storm.trident.tuple.TridentTuple;
-
-
-public class BridgeReceiver implements TupleReceiver {
-
- BatchOutputCollector _collector;
-
- public BridgeReceiver(BatchOutputCollector collector) {
- _collector = collector;
- }
-
- @Override
- public void execute(ProcessorContext context, String streamId, TridentTuple tuple) {
- _collector.emit(streamId, new ConsList(context.batchId, tuple));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/Node.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/Node.java b/jstorm-client/src/main/java/storm/trident/planner/Node.java
deleted file mode 100644
index 1a0e29d..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/Node.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.tuple.Fields;
-
-import java.io.Serializable;
-import java.util.UUID;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-public class Node implements Serializable {
- private static AtomicInteger INDEX = new AtomicInteger(0);
-
- private String nodeId;
-
- public String name = null;
- public Fields allOutputFields;
- public String streamId;
- public Integer parallelismHint = null;
- public NodeStateInfo stateInfo = null;
- public int creationIndex;
-
- public Node(String streamId, String name, Fields allOutputFields) {
- this.nodeId = UUID.randomUUID().toString();
- this.allOutputFields = allOutputFields;
- this.streamId = streamId;
- this.name = name;
- this.creationIndex = INDEX.incrementAndGet();
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((nodeId == null) ? 0 : nodeId.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- Node other = (Node) obj;
- if (nodeId == null) {
- if (other.nodeId != null)
- return false;
- } else if (!nodeId.equals(other.nodeId))
- return false;
- return true;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.MULTI_LINE_STYLE);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/NodeStateInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/NodeStateInfo.java b/jstorm-client/src/main/java/storm/trident/planner/NodeStateInfo.java
deleted file mode 100644
index a045eef..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/NodeStateInfo.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.planner;
-
-import java.io.Serializable;
-import storm.trident.state.StateSpec;
-
-public class NodeStateInfo implements Serializable {
- public String id;
- public StateSpec spec;
-
- public NodeStateInfo(String id, StateSpec spec) {
- this.id = id;
- this.spec = spec;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/PartitionNode.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/PartitionNode.java b/jstorm-client/src/main/java/storm/trident/planner/PartitionNode.java
deleted file mode 100644
index fdde133..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/PartitionNode.java
+++ /dev/null
@@ -1,35 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.generated.Grouping;
-import backtype.storm.tuple.Fields;
-import java.io.IOException;
-import java.io.ObjectInputStream;
-import java.io.ObjectOutputStream;
-import java.util.ArrayList;
-import java.util.List;
-import storm.trident.util.TridentUtils;
-
-
-public class PartitionNode extends Node {
- public transient Grouping thriftGrouping;
-
- //has the streamId/outputFields of the node it's doing the partitioning on
- public PartitionNode(String streamId, String name, Fields allOutputFields, Grouping grouping) {
- super(streamId, name, allOutputFields);
- this.thriftGrouping = grouping;
- }
-
- private void writeObject(ObjectOutputStream oos) throws IOException {
- oos.defaultWriteObject();
- byte[] ser = TridentUtils.thriftSerialize(thriftGrouping);
- oos.writeInt(ser.length);
- oos.write(ser);
- }
-
- private void readObject(ObjectInputStream ois) throws ClassNotFoundException, IOException {
- ois.defaultReadObject();
- byte[] ser = new byte[ois.readInt()];
- ois.readFully(ser);
- this.thriftGrouping = TridentUtils.thriftDeserialize(Grouping.class, ser);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/ProcessorContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/ProcessorContext.java b/jstorm-client/src/main/java/storm/trident/planner/ProcessorContext.java
deleted file mode 100644
index dc8bb6a..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/ProcessorContext.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package storm.trident.planner;
-
-
-public class ProcessorContext {
- public Object batchId;
- public Object[] state;
-
- public ProcessorContext(Object batchId, Object[] state) {
- this.batchId = batchId;
- this.state = state;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/ProcessorNode.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/ProcessorNode.java b/jstorm-client/src/main/java/storm/trident/planner/ProcessorNode.java
deleted file mode 100644
index c0e09aa..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/ProcessorNode.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.tuple.Fields;
-
-public class ProcessorNode extends Node {
-
- public boolean committer; // for partitionpersist
- public TridentProcessor processor;
- public Fields selfOutFields;
-
- public ProcessorNode(String streamId, String name, Fields allOutputFields, Fields selfOutFields, TridentProcessor processor) {
- super(streamId, name, allOutputFields);
- this.processor = processor;
- this.selfOutFields = selfOutFields;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/SpoutNode.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/SpoutNode.java b/jstorm-client/src/main/java/storm/trident/planner/SpoutNode.java
deleted file mode 100644
index 1432c43..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/SpoutNode.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.tuple.Fields;
-
-
-public class SpoutNode extends Node {
- public static enum SpoutType {
- DRPC,
- BATCH
- }
-
- public Object spout;
- public String txId; //where state is stored in zookeeper (only for batch spout types)
- public SpoutType type;
-
- public SpoutNode(String streamId, Fields allOutputFields, String txid, Object spout, SpoutType type) {
- super(streamId, null, allOutputFields);
- this.txId = txid;
- this.spout = spout;
- this.type = type;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/SubtopologyBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/SubtopologyBolt.java b/jstorm-client/src/main/java/storm/trident/planner/SubtopologyBolt.java
deleted file mode 100644
index 596c15d..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/SubtopologyBolt.java
+++ /dev/null
@@ -1,201 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import org.jgrapht.DirectedGraph;
-import org.jgrapht.graph.DirectedSubgraph;
-import org.jgrapht.traverse.TopologicalOrderIterator;
-import storm.trident.planner.processor.TridentContext;
-import storm.trident.state.State;
-import storm.trident.topology.BatchInfo;
-import storm.trident.topology.ITridentBatchBolt;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-import storm.trident.tuple.TridentTupleView.RootFactory;
-import storm.trident.util.TridentUtils;
-
-// TODO: parameterizing it like this with everything might be a high deserialization cost if there's lots of tasks?
-// TODO: memory problems?
-// TODO: can avoid these problems by adding a boltfactory abstraction, so that boltfactory is deserialized once
-// bolt factory -> returns coordinatedbolt per task, but deserializes the batch bolt one time and caches
-public class SubtopologyBolt implements ITridentBatchBolt {
- DirectedGraph _graph;
- Set<Node> _nodes;
- Map<String, InitialReceiver> _roots = new HashMap();
- Map<Node, Factory> _outputFactories = new HashMap();
- Map<String, List<TridentProcessor>> _myTopologicallyOrdered = new HashMap();
- Map<Node, String> _batchGroups;
-
- //given processornodes and static state nodes
- public SubtopologyBolt(DirectedGraph graph, Set<Node> nodes, Map<Node, String> batchGroups) {
- _nodes = nodes;
- _graph = graph;
- _batchGroups = batchGroups;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, BatchOutputCollector batchCollector) {
- int thisComponentNumTasks = context.getComponentTasks(context.getThisComponentId()).size();
- for(Node n: _nodes) {
- if(n.stateInfo!=null) {
- State s = n.stateInfo.spec.stateFactory.makeState(conf, context, context.getThisTaskIndex(), thisComponentNumTasks);
- context.setTaskData(n.stateInfo.id, s);
- }
- }
- DirectedSubgraph<Node, Object> subgraph = new DirectedSubgraph(_graph, _nodes, null);
- TopologicalOrderIterator it = new TopologicalOrderIterator<Node, Object>(subgraph);
- int stateIndex = 0;
- while(it.hasNext()) {
- Node n = (Node) it.next();
- if(n instanceof ProcessorNode) {
- ProcessorNode pn = (ProcessorNode) n;
- String batchGroup = _batchGroups.get(n);
- if(!_myTopologicallyOrdered.containsKey(batchGroup)) {
- _myTopologicallyOrdered.put(batchGroup, new ArrayList());
- }
- _myTopologicallyOrdered.get(batchGroup).add(pn.processor);
- List<String> parentStreams = new ArrayList();
- List<Factory> parentFactories = new ArrayList();
- for(Node p: TridentUtils.getParents(_graph, n)) {
- parentStreams.add(p.streamId);
- if(_nodes.contains(p)) {
- parentFactories.add(_outputFactories.get(p));
- } else {
- if(!_roots.containsKey(p.streamId)) {
- _roots.put(p.streamId, new InitialReceiver(p.streamId, getSourceOutputFields(context, p.streamId)));
- }
- _roots.get(p.streamId).addReceiver(pn.processor);
- parentFactories.add(_roots.get(p.streamId).getOutputFactory());
- }
- }
- List<TupleReceiver> targets = new ArrayList();
- boolean outgoingNode = false;
- for(Node cn: TridentUtils.getChildren(_graph, n)) {
- if(_nodes.contains(cn)) {
- targets.add(((ProcessorNode) cn).processor);
- } else {
- outgoingNode = true;
- }
- }
- if(outgoingNode) {
- targets.add(new BridgeReceiver(batchCollector));
- }
-
- TridentContext triContext = new TridentContext(
- pn.selfOutFields,
- parentFactories,
- parentStreams,
- targets,
- pn.streamId,
- stateIndex,
- batchCollector
- );
- pn.processor.prepare(conf, context, triContext);
- _outputFactories.put(n, pn.processor.getOutputFactory());
- }
- stateIndex++;
- }
- // TODO: get prepared one time into executor data... need to avoid the ser/deser
- // for each task (probably need storm to support boltfactory)
- }
-
- private Fields getSourceOutputFields(TopologyContext context, String sourceStream) {
- for(GlobalStreamId g: context.getThisSources().keySet()) {
- if(g.get_streamId().equals(sourceStream)) {
- return context.getComponentOutputFields(g);
- }
- }
- throw new RuntimeException("Could not find fields for source stream " + sourceStream);
- }
-
- @Override
- public void execute(BatchInfo batchInfo, Tuple tuple) {
- String sourceStream = tuple.getSourceStreamId();
- InitialReceiver ir = _roots.get(sourceStream);
- if(ir==null) {
- throw new RuntimeException("Received unexpected tuple " + tuple.toString());
- }
- ir.receive((ProcessorContext) batchInfo.state, tuple);
- }
-
- @Override
- public void finishBatch(BatchInfo batchInfo) {
- for(TridentProcessor p: _myTopologicallyOrdered.get(batchInfo.batchGroup)) {
- p.finishBatch((ProcessorContext) batchInfo.state);
- }
- }
-
- @Override
- public Object initBatchState(String batchGroup, Object batchId) {
- ProcessorContext ret = new ProcessorContext(batchId, new Object[_nodes.size()]);
- for(TridentProcessor p: _myTopologicallyOrdered.get(batchGroup)) {
- p.startBatch(ret);
- }
- return ret;
- }
-
- @Override
- public void cleanup() {
- for(String bg: _myTopologicallyOrdered.keySet()) {
- for(TridentProcessor p: _myTopologicallyOrdered.get(bg)) {
- p.cleanup();
- }
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- for(Node n: _nodes) {
- declarer.declareStream(n.streamId, TridentUtils.fieldsConcat(new Fields("$batchId"), n.allOutputFields));
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-
- protected class InitialReceiver {
- List<TridentProcessor> _receivers = new ArrayList();
- RootFactory _factory;
- ProjectionFactory _project;
- String _stream;
-
- public InitialReceiver(String stream, Fields allFields) {
- // TODO: don't want to project for non-batch bolts...???
- // how to distinguish "batch" streams from non-batch streams?
- _stream = stream;
- _factory = new RootFactory(allFields);
- List<String> projected = new ArrayList(allFields.toList());
- projected.remove(0);
- _project = new ProjectionFactory(_factory, new Fields(projected));
- }
-
- public void receive(ProcessorContext context, Tuple tuple) {
- TridentTuple t = _project.create(_factory.create(tuple));
- for(TridentProcessor r: _receivers) {
- r.execute(context, _stream, t);
- }
- }
-
- public void addReceiver(TridentProcessor p) {
- _receivers.add(p);
- }
-
- public Factory getOutputFactory() {
- return _project;
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/TridentProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/TridentProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/TridentProcessor.java
deleted file mode 100644
index 866d058..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/TridentProcessor.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package storm.trident.planner;
-
-import backtype.storm.task.TopologyContext;
-import java.io.Serializable;
-import java.util.Map;
-import storm.trident.planner.processor.TridentContext;
-import storm.trident.tuple.TridentTuple.Factory;
-
-public interface TridentProcessor extends Serializable, TupleReceiver {
-
- // It is imperative not to emit any tuples from here, since the output factory cannot be obtained
- // until preparation is done; receivers therefore won't be ready to receive tuples yet.
- // Tuples can't be emitted from here anyway, since this is not within a batch context (which covers
- // only startBatch, execute, and finishBatch).
- void prepare(Map conf, TopologyContext context, TridentContext tridentContext);
- void cleanup();
-
- void startBatch(ProcessorContext processorContext);
-
- void finishBatch(ProcessorContext processorContext);
-
- Factory getOutputFactory();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/TupleReceiver.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/TupleReceiver.java b/jstorm-client/src/main/java/storm/trident/planner/TupleReceiver.java
deleted file mode 100644
index a2fc148..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/TupleReceiver.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package storm.trident.planner;
-
-import storm.trident.tuple.TridentTuple;
-
-
-public interface TupleReceiver {
- //streaId indicates where tuple came from
- void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple);
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/AggregateProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/AggregateProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/processor/AggregateProcessor.java
deleted file mode 100644
index ce62790..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/AggregateProcessor.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TridentProcessor;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class AggregateProcessor implements TridentProcessor {
- Aggregator _agg;
- TridentContext _context;
- FreshCollector _collector;
- Fields _inputFields;
- ProjectionFactory _projection;
-
- public AggregateProcessor(Fields inputFields, Aggregator agg) {
- _agg = agg;
- _inputFields = inputFields;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) {
- List<Factory> parents = tridentContext.getParentTupleFactories();
- if(parents.size()!=1) {
- throw new RuntimeException("Aggregate operation can only have one parent");
- }
- _context = tridentContext;
- _collector = new FreshCollector(tridentContext);
- _projection = new ProjectionFactory(parents.get(0), _inputFields);
- _agg.prepare(conf, new TridentOperationContext(context, _projection));
- }
-
- @Override
- public void cleanup() {
- _agg.cleanup();
- }
-
- @Override
- public void startBatch(ProcessorContext processorContext) {
- _collector.setContext(processorContext);
- processorContext.state[_context.getStateIndex()] = _agg.init(processorContext.batchId, _collector);
- }
-
- @Override
- public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) {
- _collector.setContext(processorContext);
- _agg.aggregate(processorContext.state[_context.getStateIndex()], _projection.create(tuple), _collector);
- }
-
- @Override
- public void finishBatch(ProcessorContext processorContext) {
- _collector.setContext(processorContext);
- _agg.complete(processorContext.state[_context.getStateIndex()], _collector);
- }
-
- @Override
- public Factory getOutputFactory() {
- return _collector.getOutputFactory();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/AppendCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/AppendCollector.java b/jstorm-client/src/main/java/storm/trident/planner/processor/AppendCollector.java
deleted file mode 100644
index 92932cb..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/AppendCollector.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package storm.trident.planner.processor;
-
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TupleReceiver;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView;
-import storm.trident.tuple.TridentTupleView.OperationOutputFactory;
-
-
-public class AppendCollector implements TridentCollector {
- OperationOutputFactory _factory;
- TridentContext _triContext;
- TridentTuple tuple;
- ProcessorContext context;
-
- public AppendCollector(TridentContext context) {
- _triContext = context;
- _factory = new OperationOutputFactory(context.getParentTupleFactories().get(0), context.getSelfOutputFields());
- }
-
- public void setContext(ProcessorContext pc, TridentTuple t) {
- this.context = pc;
- this.tuple = t;
- }
-
- @Override
- public void emit(List<Object> values) {
- TridentTuple toEmit = _factory.create((TridentTupleView) tuple, values);
- for(TupleReceiver r: _triContext.getReceivers()) {
- r.execute(context, _triContext.getOutStreamId(), toEmit);
- }
- }
-
- @Override
- public void reportError(Throwable t) {
- _triContext.getDelegateCollector().reportError(t);
- }
-
- public Factory getOutputFactory() {
- return _factory;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/EachProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/EachProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/processor/EachProcessor.java
deleted file mode 100644
index 7b217de..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/EachProcessor.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.Function;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TridentProcessor;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class EachProcessor implements TridentProcessor {
- Function _function;
- TridentContext _context;
- AppendCollector _collector;
- Fields _inputFields;
- ProjectionFactory _projection;
-
- public EachProcessor(Fields inputFields, Function function) {
- _function = function;
- _inputFields = inputFields;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) {
- List<Factory> parents = tridentContext.getParentTupleFactories();
- if(parents.size()!=1) {
- throw new RuntimeException("Each operation can only have one parent");
- }
- _context = tridentContext;
- _collector = new AppendCollector(tridentContext);
- _projection = new ProjectionFactory(parents.get(0), _inputFields);
- _function.prepare(conf, new TridentOperationContext(context, _projection));
- }
-
- @Override
- public void cleanup() {
- _function.cleanup();
- }
-
- @Override
- public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) {
- _collector.setContext(processorContext, tuple);
- _function.execute(_projection.create(tuple), _collector);
- }
-
- @Override
- public void startBatch(ProcessorContext processorContext) {
- }
-
- @Override
- public void finishBatch(ProcessorContext processorContext) {
- }
-
- @Override
- public Factory getOutputFactory() {
- return _collector.getOutputFactory();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/FreshCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/FreshCollector.java b/jstorm-client/src/main/java/storm/trident/planner/processor/FreshCollector.java
deleted file mode 100644
index 1fb3aa6..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/FreshCollector.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package storm.trident.planner.processor;
-
-import java.util.List;
-import storm.trident.operation.TridentCollector;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TupleReceiver;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.FreshOutputFactory;
-
-
-public class FreshCollector implements TridentCollector {
- FreshOutputFactory _factory;
- TridentContext _triContext;
- ProcessorContext context;
-
- public FreshCollector(TridentContext context) {
- _triContext = context;
- _factory = new FreshOutputFactory(context.getSelfOutputFields());
- }
-
- public void setContext(ProcessorContext pc) {
- this.context = pc;
- }
-
- @Override
- public void emit(List<Object> values) {
- TridentTuple toEmit = _factory.create(values);
- for(TupleReceiver r: _triContext.getReceivers()) {
- r.execute(context, _triContext.getOutStreamId(), toEmit);
- }
- }
-
- @Override
- public void reportError(Throwable t) {
- _triContext.getDelegateCollector().reportError(t);
- }
-
- public Factory getOutputFactory() {
- return _factory;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/MultiReducerProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/MultiReducerProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/processor/MultiReducerProcessor.java
deleted file mode 100644
index 1998e1a..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/MultiReducerProcessor.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.MultiReducer;
-import storm.trident.operation.TridentMultiReducerContext;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TridentProcessor;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class MultiReducerProcessor implements TridentProcessor {
- MultiReducer _reducer;
- TridentContext _context;
- Map<String, Integer> _streamToIndex;
- List<Fields> _projectFields;
- ProjectionFactory[] _projectionFactories;
- FreshCollector _collector;
-
- public MultiReducerProcessor(List<Fields> inputFields, MultiReducer reducer) {
- _reducer = reducer;
- _projectFields = inputFields;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) {
- List<Factory> parents = tridentContext.getParentTupleFactories();
- _context = tridentContext;
- _streamToIndex = new HashMap<String, Integer>();
- List<String> parentStreams = tridentContext.getParentStreams();
- for(int i=0; i<parentStreams.size(); i++) {
- _streamToIndex.put(parentStreams.get(i), i);
- }
- _projectionFactories = new ProjectionFactory[_projectFields.size()];
- for(int i=0; i<_projectFields.size(); i++) {
- _projectionFactories[i] = new ProjectionFactory(parents.get(i), _projectFields.get(i));
- }
- _collector = new FreshCollector(tridentContext);
- _reducer.prepare(conf, new TridentMultiReducerContext((List) Arrays.asList(_projectionFactories)));
- }
-
- @Override
- public void cleanup() {
- _reducer.cleanup();
- }
-
- @Override
- public void startBatch(ProcessorContext processorContext) {
- _collector.setContext(processorContext);
- processorContext.state[_context.getStateIndex()] = _reducer.init(_collector);
- }
-
- @Override
- public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) {
- _collector.setContext(processorContext);
- int i = _streamToIndex.get(streamId);
- _reducer.execute(processorContext.state[_context.getStateIndex()], i, _projectionFactories[i].create(tuple), _collector);
- }
-
- @Override
- public void finishBatch(ProcessorContext processorContext) {
- _collector.setContext(processorContext);
- _reducer.complete(processorContext.state[_context.getStateIndex()], _collector);
- }
-
- @Override
- public Factory getOutputFactory() {
- return _collector.getOutputFactory();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/PartitionPersistProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/PartitionPersistProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/processor/PartitionPersistProcessor.java
deleted file mode 100644
index 5ab2357..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/PartitionPersistProcessor.java
+++ /dev/null
@@ -1,90 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.task.TopologyContext;
-import storm.trident.topology.TransactionAttempt;
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TridentProcessor;
-import storm.trident.state.State;
-import storm.trident.state.StateUpdater;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class PartitionPersistProcessor implements TridentProcessor {
- StateUpdater _updater;
- State _state;
- String _stateId;
- TridentContext _context;
- Fields _inputFields;
- ProjectionFactory _projection;
- FreshCollector _collector;
-
- public PartitionPersistProcessor(String stateId, Fields inputFields, StateUpdater updater) {
- _updater = updater;
- _stateId = stateId;
- _inputFields = inputFields;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) {
- List<Factory> parents = tridentContext.getParentTupleFactories();
- if(parents.size()!=1) {
- throw new RuntimeException("Partition persist operation can only have one parent");
- }
- _context = tridentContext;
- _state = (State) context.getTaskData(_stateId);
- _projection = new ProjectionFactory(parents.get(0), _inputFields);
- _collector = new FreshCollector(tridentContext);
- _updater.prepare(conf, new TridentOperationContext(context, _projection));
- }
-
- @Override
- public void cleanup() {
- _updater.cleanup();
- }
-
- @Override
- public void startBatch(ProcessorContext processorContext) {
- processorContext.state[_context.getStateIndex()] = new ArrayList<TridentTuple>();
- }
-
- @Override
- public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) {
- ((List) processorContext.state[_context.getStateIndex()]).add(_projection.create(tuple));
- }
-
- @Override
- public void finishBatch(ProcessorContext processorContext) {
- _collector.setContext(processorContext);
- Object batchId = processorContext.batchId;
- // since this processor type is a committer, this occurs in the commit phase
- List<TridentTuple> buffer = (List) processorContext.state[_context.getStateIndex()];
-
- // don't update unless there are tuples
- // this helps with things like global partition persist, where multiple tasks may still
- // exist for this processor but only the task that received tuples should do anything.
- // It is also a helpful optimization, since state implementations then don't need to guard for it manually
- if(buffer.size() > 0) {
- Long txid = null;
- // this is to support things like persisting off of drpc stream, which is inherently unreliable
- // and won't have a tx attempt
- if(batchId instanceof TransactionAttempt) {
- txid = ((TransactionAttempt) batchId).getTransactionId();
- }
- _state.beginCommit(txid);
- _updater.updateState(_state, buffer, _collector);
- _state.commit(txid);
- }
- }
-
- @Override
- public Factory getOutputFactory() {
- return _collector.getOutputFactory();
- }
-}
\ No newline at end of file
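
[Editor's sketch] The finishBatch logic above defines the commit contract a State/StateUpdater pair has to honor: beginCommit(txid), then updateState(...), then commit(txid), with the whole cycle skipped on tasks that received no tuples, and with txid left null for unreliable sources such as DRPC streams. A minimal sketch of such a pair follows; the class names (InMemoryListState, AppendUpdater) are illustrative only, and BaseStateUpdater is assumed to be the usual Trident convenience base class rather than something introduced by this patch.

import java.util.ArrayList;
import java.util.List;

import storm.trident.operation.TridentCollector;
import storm.trident.state.BaseStateUpdater;
import storm.trident.state.State;
import storm.trident.tuple.TridentTuple;

// Hypothetical in-memory state that just records the txid it is committing under.
class InMemoryListState implements State {
    final List<Object> values = new ArrayList<Object>();
    Long currentTxid;

    public void beginCommit(Long txid) { currentTxid = txid; } // txid is null for unreliable batches
    public void commit(Long txid)      { currentTxid = null; }
}

// Hypothetical updater: appends the first field of every tuple in the committed batch.
class AppendUpdater extends BaseStateUpdater<InMemoryListState> {
    public void updateState(InMemoryListState state, List<TridentTuple> tuples,
                            TridentCollector collector) {
        for (TridentTuple t : tuples) {
            state.values.add(t.getValue(0));
        }
    }
}
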
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/ProjectedProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/ProjectedProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/processor/ProjectedProcessor.java
deleted file mode 100644
index c6d34e5..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/ProjectedProcessor.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TridentProcessor;
-import storm.trident.planner.TupleReceiver;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class ProjectedProcessor implements TridentProcessor {
- Fields _projectFields;
- ProjectionFactory _factory;
- TridentContext _context;
-
- public ProjectedProcessor(Fields projectFields) {
- _projectFields = projectFields;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) {
- if(tridentContext.getParentTupleFactories().size()!=1) {
- throw new RuntimeException("Projection processor can only have one parent");
- }
- _context = tridentContext;
- _factory = new ProjectionFactory(tridentContext.getParentTupleFactories().get(0), _projectFields);
- }
-
- @Override
- public void cleanup() {
- }
-
- @Override
- public void startBatch(ProcessorContext processorContext) {
- }
-
- @Override
- public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) {
- TridentTuple toEmit = _factory.create(tuple);
- for(TupleReceiver r: _context.getReceivers()) {
- r.execute(processorContext, _context.getOutStreamId(), toEmit);
- }
- }
-
- @Override
- public void finishBatch(ProcessorContext processorContext) {
- }
-
- @Override
- public Factory getOutputFactory() {
- return _factory;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestJob.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestJob.java b/jstorm-client/src/main/java/backtype/storm/testing/TestJob.java
deleted file mode 100644
index 35a2592..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestJob.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.ILocalCluster;
-
-/**
- * This is the core interface for Storm Java testing; the unit-testing logic
- * usually goes in the run method. A code sample: <code>
- * Testing.withSimulatedTimeLocalCluster(new TestJob() {
- *     public void run(ILocalCluster cluster) {
- *         // your testing logic here.
- *     }
- * }); </code>
- */
-public interface TestJob {
- /**
- * run the testing logic with the cluster.
- *
- * @param cluster
- * the cluster which is created by
- * <code>Testing.withSimulatedTimeLocalCluster</code> or
- * <code>Testing.withTrackedCluster</code>.
- */
- public void run(ILocalCluster cluster) throws Exception;
-}
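
[Editor's sketch] For context, a typical caller hands an anonymous TestJob to one of the Testing helpers named in the JavaDoc above. The sketch below assumes the backtype.storm.Testing facade and leaves the topology and assertions as placeholders; it is illustrative and not part of this patch.

import backtype.storm.ILocalCluster;
import backtype.storm.Testing;
import backtype.storm.testing.TestJob;

public class WordCountTest {
    public void testWordCount() {
        Testing.withSimulatedTimeLocalCluster(new TestJob() {
            @Override
            public void run(ILocalCluster cluster) throws Exception {
                // build a StormTopology, submit it via cluster.submitTopology(...),
                // then assert on the captured output here
            }
        });
    }
}
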
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestKryoDecorator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestKryoDecorator.java b/jstorm-client/src/main/java/backtype/storm/testing/TestKryoDecorator.java
deleted file mode 100644
index fd9053c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestKryoDecorator.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.serialization.IKryoDecorator;
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.Serializer;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-
-public class TestKryoDecorator implements IKryoDecorator {
-
- public void decorate(Kryo k) {
- k.register(TestSerObject.class);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerBolt.java
deleted file mode 100644
index 69e175e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerBolt.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.base.BaseRichBolt;
-
-public class TestPlannerBolt extends BaseRichBolt {
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
-
- }
-
- public void execute(Tuple input) {
-
- }
-
- public Fields getOutputFields() {
- return new Fields("field1", "field2");
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(getOutputFields());
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerSpout.java b/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerSpout.java
deleted file mode 100644
index c00dc45..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestPlannerSpout.java
+++ /dev/null
@@ -1,67 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.Config;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import java.util.Map;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import java.util.HashMap;
-
-public class TestPlannerSpout extends BaseRichSpout {
- boolean _isDistributed;
- Fields _outFields;
-
- public TestPlannerSpout(Fields outFields, boolean isDistributed) {
- _isDistributed = isDistributed;
- _outFields = outFields;
- }
-
- public TestPlannerSpout(boolean isDistributed) {
- this(new Fields("field1", "field2"), isDistributed);
- }
-
- public TestPlannerSpout(Fields outFields) {
- this(outFields, true);
- }
-
- public Fields getOutputFields() {
- return _outFields;
- }
-
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
-
- }
-
- public void close() {
-
- }
-
- public void nextTuple() {
- Utils.sleep(100);
- }
-
- public void ack(Object msgId) {
-
- }
-
- public void fail(Object msgId) {
-
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(getOutputFields());
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Map<String, Object> ret = new HashMap<String, Object>();
- if (!_isDistributed) {
- ret.put(Config.TOPOLOGY_MAX_TASK_PARALLELISM, 1);
- }
- return ret;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestSerObject.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestSerObject.java b/jstorm-client/src/main/java/backtype/storm/testing/TestSerObject.java
deleted file mode 100644
index ab356a3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestSerObject.java
+++ /dev/null
@@ -1,40 +0,0 @@
-package backtype.storm.testing;
-
-import java.io.Serializable;
-
-public class TestSerObject implements Serializable {
- public int f1;
- public int f2;
-
- public TestSerObject(int f1, int f2) {
- this.f1 = f1;
- this.f2 = f2;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + f1;
- result = prime * result + f2;
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- TestSerObject other = (TestSerObject) obj;
- if (f1 != other.f1)
- return false;
- if (f2 != other.f2)
- return false;
- return true;
- }
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestWordCounter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestWordCounter.java b/jstorm-client/src/main/java/backtype/storm/testing/TestWordCounter.java
deleted file mode 100644
index 3572b9d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestWordCounter.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.topology.base.BaseBasicBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import java.util.HashMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import static backtype.storm.utils.Utils.tuple;
-
-public class TestWordCounter extends BaseBasicBolt {
- public static Logger LOG = LoggerFactory.getLogger(TestWordCounter.class);
-
- Map<String, Integer> _counts;
-
- public void prepare(Map stormConf, TopologyContext context) {
- _counts = new HashMap<String, Integer>();
- }
-
- public void execute(Tuple input, BasicOutputCollector collector) {
- String word = (String) input.getValues().get(0);
- int count = 0;
- if (_counts.containsKey(word)) {
- count = _counts.get(word);
- }
- count++;
- _counts.put(word, count);
- collector.emit(tuple(word, count));
- }
-
- public void cleanup() {
-
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("word", "count"));
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TestWordSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TestWordSpout.java b/jstorm-client/src/main/java/backtype/storm/testing/TestWordSpout.java
deleted file mode 100644
index 432e5de..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TestWordSpout.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.Config;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import java.util.Map;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-import java.util.HashMap;
-import java.util.Random;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TestWordSpout extends BaseRichSpout {
- public static Logger LOG = LoggerFactory.getLogger(TestWordSpout.class);
- boolean _isDistributed;
- SpoutOutputCollector _collector;
-
- public TestWordSpout() {
- this(true);
- }
-
- public TestWordSpout(boolean isDistributed) {
- _isDistributed = isDistributed;
- }
-
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- _collector = collector;
- }
-
- public void close() {
-
- }
-
- public void nextTuple() {
- Utils.sleep(100);
- final String[] words = new String[] { "nathan", "mike", "jackson",
- "golda", "bertels" };
- final Random rand = new Random();
- final String word = words[rand.nextInt(words.length)];
- _collector.emit(new Values(word));
- }
-
- public void ack(Object msgId) {
-
- }
-
- public void fail(Object msgId) {
-
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("word"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- if (!_isDistributed) {
- Map<String, Object> ret = new HashMap<String, Object>();
- ret.put(Config.TOPOLOGY_MAX_TASK_PARALLELISM, 1);
- return ret;
- } else {
- return null;
- }
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TrackedTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TrackedTopology.java b/jstorm-client/src/main/java/backtype/storm/testing/TrackedTopology.java
deleted file mode 100644
index dc98ba5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TrackedTopology.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package backtype.storm.testing;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import backtype.storm.generated.StormTopology;
-import clojure.lang.Keyword;
-
-public class TrackedTopology extends HashMap {
- public TrackedTopology(Map map) {
- super(map);
- }
-
- public StormTopology getTopology() {
- return (StormTopology) get(Keyword.intern("topology"));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/testing/TupleCaptureBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/testing/TupleCaptureBolt.java b/jstorm-client/src/main/java/backtype/storm/testing/TupleCaptureBolt.java
deleted file mode 100644
index 33c07b7..0000000
--- a/jstorm-client/src/main/java/backtype/storm/testing/TupleCaptureBolt.java
+++ /dev/null
@@ -1,68 +0,0 @@
-package backtype.storm.testing;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.UUID;
-
-public class TupleCaptureBolt implements IRichBolt {
- public static transient Map<String, Map<String, List<FixedTuple>>> emitted_tuples = new HashMap<String, Map<String, List<FixedTuple>>>();
-
- private String _name;
- private transient OutputCollector _collector;
-
- public TupleCaptureBolt() {
- _name = UUID.randomUUID().toString();
- emitted_tuples.put(_name, new HashMap<String, List<FixedTuple>>());
- }
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- _collector = collector;
- }
-
- public void execute(Tuple input) {
- String component = input.getSourceComponent();
- Map<String, List<FixedTuple>> captured = emitted_tuples.get(_name);
- if (!captured.containsKey(component)) {
- captured.put(component, new ArrayList<FixedTuple>());
- }
- captured.get(component).add(
- new FixedTuple(input.getSourceStreamId(), input.getValues()));
- _collector.ack(input);
- }
-
- public Map<String, List<FixedTuple>> getResults() {
- return emitted_tuples.get(_name);
- }
-
- public void cleanup() {
- }
-
- public Map<String, List<FixedTuple>> getAndRemoveResults() {
- return emitted_tuples.remove(_name);
- }
-
- public Map<String, List<FixedTuple>> getAndClearResults() {
- Map<String, List<FixedTuple>> ret = new HashMap<String, List<FixedTuple>>(
- emitted_tuples.get(_name));
- emitted_tuples.get(_name).clear();
- return ret;
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/BaseConfigurationDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/BaseConfigurationDeclarer.java b/jstorm-client/src/main/java/backtype/storm/topology/BaseConfigurationDeclarer.java
deleted file mode 100644
index fcf90b5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/BaseConfigurationDeclarer.java
+++ /dev/null
@@ -1,41 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.Config;
-import java.util.HashMap;
-import java.util.Map;
-
-public abstract class BaseConfigurationDeclarer<T extends ComponentConfigurationDeclarer>
- implements ComponentConfigurationDeclarer<T> {
- @Override
- public T addConfiguration(String config, Object value) {
- Map configMap = new HashMap();
- configMap.put(config, value);
- return addConfigurations(configMap);
- }
-
- @Override
- public T setDebug(boolean debug) {
- return addConfiguration(Config.TOPOLOGY_DEBUG, debug);
- }
-
- @Override
- public T setMaxTaskParallelism(Number val) {
- if (val != null)
- val = val.intValue();
- return addConfiguration(Config.TOPOLOGY_MAX_TASK_PARALLELISM, val);
- }
-
- @Override
- public T setMaxSpoutPending(Number val) {
- if (val != null)
- val = val.intValue();
- return addConfiguration(Config.TOPOLOGY_MAX_SPOUT_PENDING, val);
- }
-
- @Override
- public T setNumTasks(Number val) {
- if (val != null)
- val = val.intValue();
- return addConfiguration(Config.TOPOLOGY_TASKS, val);
- }
-}
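
[Editor's sketch] Since BoltDeclarer and SpoutDeclarer extend ComponentConfigurationDeclarer, these setters are normally chained off TopologyBuilder. A short sketch follows; the component ids, parallelism figures, and pending limit are placeholders rather than values taken from this patch.

import backtype.storm.testing.TestWordCounter;
import backtype.storm.testing.TestWordSpout;
import backtype.storm.topology.TopologyBuilder;

public class FluentConfigExample {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new TestWordSpout(), 2)
               .setMaxSpoutPending(500);   // per-component topology.max.spout.pending
        builder.setBolt("counts", new TestWordCounter(), 4)
               .setNumTasks(8)             // more tasks than the initial executor count
               .setDebug(true)             // per-component topology.debug
               .shuffleGrouping("words");
        // builder.createTopology() can then be submitted as usual
    }
}
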
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/BasicBoltExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/BasicBoltExecutor.java b/jstorm-client/src/main/java/backtype/storm/topology/BasicBoltExecutor.java
deleted file mode 100644
index 586f17c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/BasicBoltExecutor.java
+++ /dev/null
@@ -1,50 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class BasicBoltExecutor implements IRichBolt {
- public static Logger LOG = LoggerFactory.getLogger(BasicBoltExecutor.class);
-
- private IBasicBolt _bolt;
- private transient BasicOutputCollector _collector;
-
- public BasicBoltExecutor(IBasicBolt bolt) {
- _bolt = bolt;
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _bolt.declareOutputFields(declarer);
- }
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- _bolt.prepare(stormConf, context);
- _collector = new BasicOutputCollector(collector);
- }
-
- public void execute(Tuple input) {
- _collector.setContext(input);
- try {
- _bolt.execute(input, _collector);
- _collector.getOutputter().ack(input);
- } catch (FailedException e) {
- if (e instanceof ReportedFailedException) {
- _collector.reportError(e);
- }
- _collector.getOutputter().fail(input);
- }
- }
-
- public void cleanup() {
- _bolt.cleanup();
- }
-
- public Map<String, Object> getComponentConfiguration() {
- return _bolt.getComponentConfiguration();
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/BasicOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/BasicOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/topology/BasicOutputCollector.java
deleted file mode 100644
index a0a892f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/BasicOutputCollector.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.task.IOutputCollector;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-import java.util.List;
-
-public class BasicOutputCollector implements IBasicOutputCollector {
- private OutputCollector out;
- private Tuple inputTuple;
-
- public BasicOutputCollector(OutputCollector out) {
- this.out = out;
- }
-
- public List<Integer> emit(String streamId, List<Object> tuple) {
- return out.emit(streamId, inputTuple, tuple);
- }
-
- public List<Integer> emit(List<Object> tuple) {
- return emit(Utils.DEFAULT_STREAM_ID, tuple);
- }
-
- public void setContext(Tuple inputTuple) {
- this.inputTuple = inputTuple;
- }
-
- public void emitDirect(int taskId, String streamId, List<Object> tuple) {
- out.emitDirect(taskId, streamId, inputTuple, tuple);
- }
-
- public void emitDirect(int taskId, List<Object> tuple) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple);
- }
-
- protected IOutputCollector getOutputter() {
- return out;
- }
-
- public void reportError(Throwable t) {
- out.reportError(t);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/BoltDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/BoltDeclarer.java b/jstorm-client/src/main/java/backtype/storm/topology/BoltDeclarer.java
deleted file mode 100644
index 0e9cd58..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/BoltDeclarer.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package backtype.storm.topology;
-
-public interface BoltDeclarer extends InputDeclarer<BoltDeclarer>,
- ComponentConfigurationDeclarer<BoltDeclarer> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/ComponentConfigurationDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/ComponentConfigurationDeclarer.java b/jstorm-client/src/main/java/backtype/storm/topology/ComponentConfigurationDeclarer.java
deleted file mode 100644
index 3c59980..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/ComponentConfigurationDeclarer.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package backtype.storm.topology;
-
-import java.util.Map;
-
-public interface ComponentConfigurationDeclarer<T extends ComponentConfigurationDeclarer> {
- T addConfigurations(Map conf);
-
- T addConfiguration(String config, Object value);
-
- T setDebug(boolean debug);
-
- T setMaxTaskParallelism(Number val);
-
- T setMaxSpoutPending(Number val);
-
- @Deprecated
- T setNumTasks(Number val);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/FailedException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/FailedException.java b/jstorm-client/src/main/java/backtype/storm/topology/FailedException.java
deleted file mode 100644
index 793f53e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/FailedException.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package backtype.storm.topology;
-
-public class FailedException extends RuntimeException {
- public FailedException() {
- super();
- }
-
- public FailedException(String msg) {
- super(msg);
- }
-
- public FailedException(String msg, Throwable cause) {
- super(msg, cause);
- }
-
- public FailedException(Throwable cause) {
- super(cause);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/IBasicBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/IBasicBolt.java b/jstorm-client/src/main/java/backtype/storm/topology/IBasicBolt.java
deleted file mode 100644
index e5f303e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/IBasicBolt.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Tuple;
-import java.util.Map;
-
-public interface IBasicBolt extends IComponent {
- void prepare(Map stormConf, TopologyContext context);
-
- /**
- * Process the input tuple and optionally emit new tuples based on the input
- * tuple.
- *
- * All acking is managed for you. Throw a FailedException if you want to
- * fail the tuple.
- */
- void execute(Tuple input, BasicOutputCollector collector);
-
- void cleanup();
-}
\ No newline at end of file
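
[Editor's sketch] A small bolt written against this contract leaves acking to BasicBoltExecutor and fails a tuple by throwing FailedException. The sketch below is illustrative (the bolt name and field layout are made up); it relies only on BaseBasicBolt and FailedException, both of which appear elsewhere in this patch.

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.FailedException;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class UpperCaseBolt extends BaseBasicBolt {
    @Override
    public void execute(Tuple input, BasicOutputCollector collector) {
        String word = input.getString(0);
        if (word == null || word.isEmpty()) {
            throw new FailedException("empty word"); // tuple is failed instead of acked
        }
        collector.emit(new Values(word.toUpperCase())); // anchored to input, acked automatically
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("word"));
    }
}
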
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/IBasicOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/IBasicOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/topology/IBasicOutputCollector.java
deleted file mode 100644
index 44c0fe1..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/IBasicOutputCollector.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package backtype.storm.topology;
-
-import java.util.List;
-
-public interface IBasicOutputCollector {
- List<Integer> emit(String streamId, List<Object> tuple);
-
- void emitDirect(int taskId, String streamId, List<Object> tuple);
-
- void reportError(Throwable t);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/IComponent.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/IComponent.java b/jstorm-client/src/main/java/backtype/storm/topology/IComponent.java
deleted file mode 100644
index 88f1450..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/IComponent.java
+++ /dev/null
@@ -1,30 +0,0 @@
-package backtype.storm.topology;
-
-import java.io.Serializable;
-import java.util.Map;
-
-/**
- * Common methods for all possible components in a topology. This interface is
- * used when defining topologies using the Java API.
- */
-public interface IComponent extends Serializable {
-
- /**
- * Declare the output schema for all the streams of this topology.
- *
- * @param declarer
- * this is used to declare output stream ids, output fields, and
- * whether or not each output stream is a direct stream
- */
- void declareOutputFields(OutputFieldsDeclarer declarer);
-
- /**
- * Declare configuration specific to this component. Only a subset of the
- * "topology.*" configs can be overridden. The component configuration can
- * be further overridden when constructing the topology using
- * {@link TopologyBuilder}
- *
- */
- Map<String, Object> getComponentConfiguration();
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/IRichBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/IRichBolt.java b/jstorm-client/src/main/java/backtype/storm/topology/IRichBolt.java
deleted file mode 100644
index b79126c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/IRichBolt.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.task.IBolt;
-
-/**
- * When writing topologies using Java, {@link IRichBolt} and {@link IRichSpout}
- * are the main interfaces to use to implement components of the topology.
- *
- */
-public interface IRichBolt extends IBolt, IComponent {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/IRichSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/IRichSpout.java b/jstorm-client/src/main/java/backtype/storm/topology/IRichSpout.java
deleted file mode 100644
index 6953f66..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/IRichSpout.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.spout.ISpout;
-
-/**
- * When writing topologies using Java, {@link IRichBolt} and {@link IRichSpout}
- * are the main interfaces to use to implement components of the topology.
- *
- */
-public interface IRichSpout extends ISpout, IComponent {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/IRichStateSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/IRichStateSpout.java b/jstorm-client/src/main/java/backtype/storm/topology/IRichStateSpout.java
deleted file mode 100644
index ffc2ec2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/IRichStateSpout.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.state.IStateSpout;
-
-public interface IRichStateSpout extends IStateSpout, IComponent {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/InputDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/InputDeclarer.java b/jstorm-client/src/main/java/backtype/storm/topology/InputDeclarer.java
deleted file mode 100644
index b97daca..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/InputDeclarer.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.tuple.Fields;
-
-public interface InputDeclarer<T extends InputDeclarer> {
- public T fieldsGrouping(String componentId, Fields fields);
-
- public T fieldsGrouping(String componentId, String streamId, Fields fields);
-
- public T globalGrouping(String componentId);
-
- public T globalGrouping(String componentId, String streamId);
-
- public T shuffleGrouping(String componentId);
-
- public T shuffleGrouping(String componentId, String streamId);
-
- public T localOrShuffleGrouping(String componentId);
-
- public T localOrShuffleGrouping(String componentId, String streamId);
-
- public T localFirstGrouping(String componentId);
-
- public T localFirstGrouping(String componentId, String streamId);
-
- public T noneGrouping(String componentId);
-
- public T noneGrouping(String componentId, String streamId);
-
- public T allGrouping(String componentId);
-
- public T allGrouping(String componentId, String streamId);
-
- public T directGrouping(String componentId);
-
- public T directGrouping(String componentId, String streamId);
-
- public T customGrouping(String componentId, CustomStreamGrouping grouping);
-
- public T customGrouping(String componentId, String streamId,
- CustomStreamGrouping grouping);
-
- public T grouping(GlobalStreamId id, Grouping grouping);
-
-}
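
[Editor's sketch] These grouping declarations are what the BoltDeclarer returned by TopologyBuilder.setBolt ultimately implements. The sketch below wires together test components deleted elsewhere in this patch; the component ids are placeholders, and the comment on localFirstGrouping reflects its name only, since this interface does not document its semantics.

import backtype.storm.testing.TestPlannerBolt;
import backtype.storm.testing.TestWordCounter;
import backtype.storm.testing.TestWordSpout;
import backtype.storm.topology.TopologyBuilder;
import backtype.storm.tuple.Fields;

public class GroupingExample {
    public static void main(String[] args) {
        TopologyBuilder builder = new TopologyBuilder();
        builder.setSpout("words", new TestWordSpout(), 3);
        builder.setBolt("counts", new TestWordCounter(), 4)
               .fieldsGrouping("words", new Fields("word")); // same word always reaches the same task
        builder.setBolt("sink", new TestPlannerBolt())
               .localFirstGrouping("counts");                // jstorm-specific grouping declared above
    }
}
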
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsDeclarer.java b/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsDeclarer.java
deleted file mode 100644
index a981cc6..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsDeclarer.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.tuple.Fields;
-
-public interface OutputFieldsDeclarer {
- /**
- * Uses default stream id.
- */
- public void declare(Fields fields);
-
- public void declare(boolean direct, Fields fields);
-
- public void declareStream(String streamId, Fields fields);
-
- public void declareStream(String streamId, boolean direct, Fields fields);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsGetter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsGetter.java b/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsGetter.java
deleted file mode 100644
index 1a6c0c2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/OutputFieldsGetter.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package backtype.storm.topology;
-
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import java.util.HashMap;
-import java.util.Map;
-
-public class OutputFieldsGetter implements OutputFieldsDeclarer {
- private Map<String, StreamInfo> _fields = new HashMap<String, StreamInfo>();
-
- public void declare(Fields fields) {
- declare(false, fields);
- }
-
- public void declare(boolean direct, Fields fields) {
- declareStream(Utils.DEFAULT_STREAM_ID, direct, fields);
- }
-
- public void declareStream(String streamId, Fields fields) {
- declareStream(streamId, false, fields);
- }
-
- public void declareStream(String streamId, boolean direct, Fields fields) {
- if (_fields.containsKey(streamId)) {
- throw new IllegalArgumentException("Fields for " + streamId
- + " already set");
- }
- _fields.put(streamId, new StreamInfo(fields.toList(), direct));
- }
-
- public Map<String, StreamInfo> getFieldsDeclaration() {
- return _fields;
- }
-
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/ReportedFailedException.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/ReportedFailedException.java b/jstorm-client/src/main/java/backtype/storm/topology/ReportedFailedException.java
deleted file mode 100644
index 74bd0de..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/ReportedFailedException.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package backtype.storm.topology;
-
-public class ReportedFailedException extends FailedException {
- public ReportedFailedException() {
- super();
- }
-
- public ReportedFailedException(String msg) {
- super(msg);
- }
-
- public ReportedFailedException(String msg, Throwable cause) {
- super(msg, cause);
- }
-
- public ReportedFailedException(Throwable cause) {
- super(cause);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/SpoutDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/SpoutDeclarer.java b/jstorm-client/src/main/java/backtype/storm/topology/SpoutDeclarer.java
deleted file mode 100644
index b14d496..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/SpoutDeclarer.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package backtype.storm.topology;
-
-public interface SpoutDeclarer extends
- ComponentConfigurationDeclarer<SpoutDeclarer> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/TopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/TopologyBuilder.java b/jstorm-client/src/main/java/backtype/storm/topology/TopologyBuilder.java
deleted file mode 100644
index 2b2f55b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/TopologyBuilder.java
+++ /dev/null
@@ -1,424 +0,0 @@
-package backtype.storm.topology;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
-import backtype.storm.Config;
-import backtype.storm.generated.Bolt;
-import backtype.storm.generated.ComponentCommon;
-import backtype.storm.generated.ComponentObject;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.NullStruct;
-import backtype.storm.generated.SpoutSpec;
-import backtype.storm.generated.StateSpoutSpec;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-
-/**
- * TopologyBuilder exposes the Java API for specifying a topology for Storm to
- * execute. Topologies are Thrift structures in the end, but since the Thrift
- * API is so verbose, TopologyBuilder greatly eases the process of creating
- * topologies. The template for creating and submitting a topology looks
- * something like:
- *
- * <pre>
- * TopologyBuilder builder = new TopologyBuilder();
- *
- * builder.setSpout("1", new TestWordSpout(true), 5);
- * builder.setSpout("2", new TestWordSpout(true), 3);
- * builder.setBolt("3", new TestWordCounter(), 3)
- * .fieldsGrouping("1", new Fields("word"))
- * .fieldsGrouping("2", new Fields("word"));
- * builder.setBolt("4", new TestGlobalCount()).globalGrouping("1");
- *
- * Map conf = new HashMap();
- * conf.put(Config.TOPOLOGY_WORKERS, 4);
- *
- * StormSubmitter.submitTopology("mytopology", conf, builder.createTopology());
- * </pre>
- *
- * Running the exact same topology in local mode (in process), and configuring
- * it to log all tuples emitted, looks like the following. Note that it lets the
- * topology run for 10 seconds before shutting down the local cluster.
- *
- * <pre>
- * TopologyBuilder builder = new TopologyBuilder();
- *
- * builder.setSpout("1", new TestWordSpout(true), 5);
- * builder.setSpout("2", new TestWordSpout(true), 3);
- * builder.setBolt("3", new TestWordCounter(), 3)
- * .fieldsGrouping("1", new Fields("word"))
- * .fieldsGrouping("2", new Fields("word"));
- * builder.setBolt("4", new TestGlobalCount()).globalGrouping("1");
- *
- * Map conf = new HashMap();
- * conf.put(Config.TOPOLOGY_WORKERS, 4);
- * conf.put(Config.TOPOLOGY_DEBUG, true);
- *
- * LocalCluster cluster = new LocalCluster();
- * cluster.submitTopology("mytopology", conf, builder.createTopology());
- * Utils.sleep(10000);
- * cluster.shutdown();
- * </pre>
- *
- * <p>
- * The pattern for TopologyBuilder is to map component ids to components using
- * the setSpout and setBolt methods. Those methods return objects that are then
- * used to declare the inputs for that component.
- * </p>
- */
-public class TopologyBuilder {
- private Map<String, IRichBolt> _bolts = new HashMap<String, IRichBolt>();
- private Map<String, IRichSpout> _spouts = new HashMap<String, IRichSpout>();
- private Map<String, ComponentCommon> _commons = new HashMap<String, ComponentCommon>();
-
- // private Map<String, Map<GlobalStreamId, Grouping>> _inputs = new
- // HashMap<String, Map<GlobalStreamId, Grouping>>();
-
- private Map<String, StateSpoutSpec> _stateSpouts = new HashMap<String, StateSpoutSpec>();
-
- public StormTopology createTopology() {
- Map<String, Bolt> boltSpecs = new HashMap<String, Bolt>();
- Map<String, SpoutSpec> spoutSpecs = new HashMap<String, SpoutSpec>();
- for (String boltId : _bolts.keySet()) {
- IRichBolt bolt = _bolts.get(boltId);
- ComponentCommon common = getComponentCommon(boltId, bolt);
- boltSpecs.put(
- boltId,
- new Bolt(ComponentObject.serialized_java(Utils
- .serialize(bolt)), common));
- }
- for (String spoutId : _spouts.keySet()) {
- IRichSpout spout = _spouts.get(spoutId);
- ComponentCommon common = getComponentCommon(spoutId, spout);
- spoutSpecs.put(
- spoutId,
- new SpoutSpec(ComponentObject.serialized_java(Utils
- .serialize(spout)), common));
-
- }
- return new StormTopology(spoutSpecs, boltSpecs,
- new HashMap<String, StateSpoutSpec>());
- }
-
- /**
- * Define a new bolt in this topology with parallelism of just one thread.
- *
- * @param id
- * the id of this component. This id is referenced by other
- * components that want to consume this bolt's outputs.
- * @param bolt
- * the bolt
- * @return use the returned object to declare the inputs to this component
- */
- public BoltDeclarer setBolt(String id, IRichBolt bolt) {
- return setBolt(id, bolt, null);
- }
-
- /**
- * Define a new bolt in this topology with the specified amount of
- * parallelism.
- *
- * @param id
- * the id of this component. This id is referenced by other
- * components that want to consume this bolt's outputs.
- * @param bolt
- * the bolt
- * @param parallelism_hint
- * the number of tasks that should be assigned to execute this
- * bolt. Each task will run on a thread in a process somewhere
- * around the cluster.
- * @return use the returned object to declare the inputs to this component
- */
- public BoltDeclarer setBolt(String id, IRichBolt bolt,
- Number parallelism_hint) {
- validateUnusedId(id);
- initCommon(id, bolt, parallelism_hint);
- _bolts.put(id, bolt);
- return new BoltGetter(id);
- }
-
- /**
- * Define a new bolt in this topology. This defines a basic bolt, which is a
- * simpler-to-use but more restricted kind of bolt. Basic bolts are intended
- * for non-aggregation processing and automate the anchoring/acking process
- * to achieve proper reliability in the topology.
- *
- * @param id
- * the id of this component. This id is referenced by other
- * components that want to consume this bolt's outputs.
- * @param bolt
- * the basic bolt
- * @return use the returned object to declare the inputs to this component
- */
- public BoltDeclarer setBolt(String id, IBasicBolt bolt) {
- return setBolt(id, bolt, null);
- }
-
- /**
- * Define a new bolt in this topology. This defines a basic bolt, which is a
- * simpler-to-use but more restricted kind of bolt. Basic bolts are intended
- * for non-aggregation processing and automate the anchoring/acking process
- * to achieve proper reliability in the topology.
- *
- * @param id
- * the id of this component. This id is referenced by other
- * components that want to consume this bolt's outputs.
- * @param bolt
- * the basic bolt
- * @param parallelism_hint
- * the number of tasks that should be assigned to execute this
- * bolt. Each task will run on a thread in a process somewhere
- * around the cluster.
- * @return use the returned object to declare the inputs to this component
- */
- public BoltDeclarer setBolt(String id, IBasicBolt bolt,
- Number parallelism_hint) {
- return setBolt(id, new BasicBoltExecutor(bolt), parallelism_hint);
- }
-
- /**
- * Define a new spout in this topology.
- *
- * @param id
- * the id of this component. This id is referenced by other
- * components that want to consume this spout's outputs.
- * @param spout
- * the spout
- */
- public SpoutDeclarer setSpout(String id, IRichSpout spout) {
- return setSpout(id, spout, null);
- }
-
- /**
- * Define a new spout in this topology with the specified parallelism. If
- * the spout declares itself as non-distributed, the parallelism_hint will
- * be ignored and only one task will be allocated to this component.
- *
- * @param id
- * the id of this component. This id is referenced by other
- * components that want to consume this spout's outputs.
- * @param parallelism_hint
- * the number of tasks that should be assigned to execute this
- * spout. Each task will run on a thread in a process somewhere
- * around the cluster.
- * @param spout
- * the spout
- */
- public SpoutDeclarer setSpout(String id, IRichSpout spout,
- Number parallelism_hint) {
- validateUnusedId(id);
- initCommon(id, spout, parallelism_hint);
- _spouts.put(id, spout);
- return new SpoutGetter(id);
- }
-
- public void setStateSpout(String id, IRichStateSpout stateSpout) {
- setStateSpout(id, stateSpout, null);
- }
-
- public void setStateSpout(String id, IRichStateSpout stateSpout,
- Number parallelism_hint) {
- validateUnusedId(id);
- // TODO: finish
- }
-
- private void validateUnusedId(String id) {
- if (_bolts.containsKey(id)) {
- throw new IllegalArgumentException(
- "Bolt has already been declared for id " + id);
- }
- if (_spouts.containsKey(id)) {
- throw new IllegalArgumentException(
- "Spout has already been declared for id " + id);
- }
- if (_stateSpouts.containsKey(id)) {
- throw new IllegalArgumentException(
- "State spout has already been declared for id " + id);
- }
- }
-
- private ComponentCommon getComponentCommon(String id, IComponent component) {
- ComponentCommon ret = new ComponentCommon(_commons.get(id));
-
- OutputFieldsGetter getter = new OutputFieldsGetter();
- component.declareOutputFields(getter);
- ret.set_streams(getter.getFieldsDeclaration());
- return ret;
- }
-
- private void initCommon(String id, IComponent component, Number parallelism) {
- ComponentCommon common = new ComponentCommon();
- common.set_inputs(new HashMap<GlobalStreamId, Grouping>());
- if (parallelism != null)
- common.set_parallelism_hint(parallelism.intValue());
- else {
- common.set_parallelism_hint(Integer.valueOf(1));
- }
- Map conf = component.getComponentConfiguration();
- if (conf != null)
- common.set_json_conf(Utils.to_json(conf));
- _commons.put(id, common);
- }
-
- protected class ConfigGetter<T extends ComponentConfigurationDeclarer>
- extends BaseConfigurationDeclarer<T> {
- String _id;
-
- public ConfigGetter(String id) {
- _id = id;
- }
-
- @Override
- public T addConfigurations(Map conf) {
- if (conf != null && conf.containsKey(Config.TOPOLOGY_KRYO_REGISTER)) {
- throw new IllegalArgumentException(
- "Cannot set serializations for a component using fluent API");
- }
- String currConf = _commons.get(_id).get_json_conf();
- _commons.get(_id).set_json_conf(
- mergeIntoJson(parseJson(currConf), conf));
- return (T) this;
- }
- }
-
- protected class SpoutGetter extends ConfigGetter<SpoutDeclarer> implements
- SpoutDeclarer {
- public SpoutGetter(String id) {
- super(id);
- }
- }
-
- protected class BoltGetter extends ConfigGetter<BoltDeclarer> implements
- BoltDeclarer {
- private String _boltId;
-
- public BoltGetter(String boltId) {
- super(boltId);
- _boltId = boltId;
- }
-
- public BoltDeclarer fieldsGrouping(String componentId, Fields fields) {
- return fieldsGrouping(componentId, Utils.DEFAULT_STREAM_ID, fields);
- }
-
- public BoltDeclarer fieldsGrouping(String componentId, String streamId,
- Fields fields) {
- return grouping(componentId, streamId,
- Grouping.fields(fields.toList()));
- }
-
- public BoltDeclarer globalGrouping(String componentId) {
- return globalGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- public BoltDeclarer globalGrouping(String componentId, String streamId) {
- return grouping(componentId, streamId,
- Grouping.fields(new ArrayList<String>()));
- }
-
- public BoltDeclarer shuffleGrouping(String componentId) {
- return shuffleGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- public BoltDeclarer shuffleGrouping(String componentId, String streamId) {
- return grouping(componentId, streamId,
- Grouping.shuffle(new NullStruct()));
- }
-
- public BoltDeclarer localOrShuffleGrouping(String componentId) {
- return localOrShuffleGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- public BoltDeclarer localOrShuffleGrouping(String componentId,
- String streamId) {
- return grouping(componentId, streamId,
- Grouping.local_or_shuffle(new NullStruct()));
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(String componentId) {
- return localFirstGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(String componentId,
- String streamId) {
- return grouping(componentId, streamId,
- Grouping.localFirst(new NullStruct()));
- }
-
- public BoltDeclarer noneGrouping(String componentId) {
- return noneGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- public BoltDeclarer noneGrouping(String componentId, String streamId) {
- return grouping(componentId, streamId,
- Grouping.none(new NullStruct()));
- }
-
- public BoltDeclarer allGrouping(String componentId) {
- return allGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- public BoltDeclarer allGrouping(String componentId, String streamId) {
- return grouping(componentId, streamId,
- Grouping.all(new NullStruct()));
- }
-
- public BoltDeclarer directGrouping(String componentId) {
- return directGrouping(componentId, Utils.DEFAULT_STREAM_ID);
- }
-
- public BoltDeclarer directGrouping(String componentId, String streamId) {
- return grouping(componentId, streamId,
- Grouping.direct(new NullStruct()));
- }
-
- private BoltDeclarer grouping(String componentId, String streamId,
- Grouping grouping) {
- _commons.get(_boltId).put_to_inputs(
- new GlobalStreamId(componentId, streamId), grouping);
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(String componentId,
- CustomStreamGrouping grouping) {
- return customGrouping(componentId, Utils.DEFAULT_STREAM_ID,
- grouping);
- }
-
- @Override
- public BoltDeclarer customGrouping(String componentId, String streamId,
- CustomStreamGrouping grouping) {
- return grouping(componentId, streamId,
- Grouping.custom_serialized(Utils.serialize(grouping)));
- }
-
- @Override
- public BoltDeclarer grouping(GlobalStreamId id, Grouping grouping) {
- return grouping(id.get_componentId(), id.get_streamId(), grouping);
- }
-
-
- }
-
- private static Map parseJson(String json) {
- if (json == null)
- return new HashMap();
- else
- return (Map) Utils.from_json(json);
- }
-
- private static String mergeIntoJson(Map into, Map newMap) {
- Map res = new HashMap(into);
- if (newMap != null)
- res.putAll(newMap);
- return Utils.to_json(res);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBasicBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBasicBolt.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBasicBolt.java
deleted file mode 100644
index 2e18207..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBasicBolt.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IBasicBolt;
-import java.util.Map;
-
-public abstract class BaseBasicBolt extends BaseComponent implements IBasicBolt {
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBatchBolt.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBatchBolt.java
deleted file mode 100644
index 9171392..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseBatchBolt.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.coordination.IBatchBolt;
-import java.util.Map;
-
-public abstract class BaseBatchBolt<T> extends BaseComponent implements
- IBatchBolt<T> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseComponent.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseComponent.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseComponent.java
deleted file mode 100644
index 1b1449a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseComponent.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.topology.IComponent;
-import java.util.Map;
-
-public abstract class BaseComponent implements IComponent {
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseOpaquePartitionedTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseOpaquePartitionedTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseOpaquePartitionedTransactionalSpout.java
deleted file mode 100644
index baf0c65..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseOpaquePartitionedTransactionalSpout.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.transactional.partitioned.IOpaquePartitionedTransactionalSpout;
-
-public abstract class BaseOpaquePartitionedTransactionalSpout<T> extends
- BaseComponent implements IOpaquePartitionedTransactionalSpout<T> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BasePartitionedTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BasePartitionedTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BasePartitionedTransactionalSpout.java
deleted file mode 100644
index 2a9f298..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BasePartitionedTransactionalSpout.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.transactional.partitioned.IPartitionedTransactionalSpout;
-import java.util.Map;
-
-public abstract class BasePartitionedTransactionalSpout<T> extends
- BaseComponent implements IPartitionedTransactionalSpout<T> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichBolt.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichBolt.java
deleted file mode 100644
index e6a7592..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichBolt.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.topology.IRichBolt;
-
-public abstract class BaseRichBolt extends BaseComponent implements IRichBolt {
- @Override
- public void cleanup() {
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichSpout.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichSpout.java
deleted file mode 100644
index 9c0d733..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseRichSpout.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * To change this template, choose Tools | Templates
- * and open the template in the editor.
- */
-package backtype.storm.topology.base;
-
-import backtype.storm.topology.IRichSpout;
-
-/**
- *
- * @author nathan
- */
-public abstract class BaseRichSpout extends BaseComponent implements IRichSpout {
- @Override
- public void close() {
- }
-
- @Override
- public void activate() {
- }
-
- @Override
- public void deactivate() {
- }
-
- @Override
- public void ack(Object msgId) {
- }
-
- @Override
- public void fail(Object msgId) {
- }
-}
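
A minimal spout built on these defaults typically overrides only open, nextTuple, and
declareOutputFields, roughly as in the following sketch (the spout name and emitted
field are illustrative assumptions, not code from this repository):

    import backtype.storm.spout.SpoutOutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseRichSpout;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;

    import java.util.Map;
    import java.util.Random;

    // Hypothetical spout emitting random numbers; close/activate/deactivate/ack/fail
    // fall back to the empty implementations inherited from BaseRichSpout.
    public class RandomNumberSpout extends BaseRichSpout {
        private SpoutOutputCollector collector;
        private Random random;

        @Override
        public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
            this.collector = collector;
            this.random = new Random();
        }

        @Override
        public void nextTuple() {
            collector.emit(new Values(random.nextInt(100)));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("number"));
        }
    }
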
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalBolt.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalBolt.java
deleted file mode 100644
index 77647c0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalBolt.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.transactional.TransactionAttempt;
-
-public abstract class BaseTransactionalBolt extends
- BaseBatchBolt<TransactionAttempt> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalSpout.java
deleted file mode 100644
index 01945ea..0000000
--- a/jstorm-client/src/main/java/backtype/storm/topology/base/BaseTransactionalSpout.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.topology.base;
-
-import backtype.storm.transactional.ITransactionalSpout;
-import java.util.Map;
-
-public abstract class BaseTransactionalSpout<T> extends BaseComponent implements
- ITransactionalSpout<T> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/ICommitter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/ICommitter.java b/jstorm-client/src/main/java/backtype/storm/transactional/ICommitter.java
deleted file mode 100644
index e764fb7..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/ICommitter.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package backtype.storm.transactional;
-
-/**
- * This marks an IBatchBolt within a transactional topology as a committer. This
- * causes the finishBatch method to be called in order of the transactions.
- */
-public interface ICommitter {
-
-}
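
In practice a committer is an IBatchBolt that also implements this marker interface, so
that its finishBatch is invoked in strict transaction order. The sketch below is only an
illustration of that shape, assuming the IBatchBolt prepare/execute/finishBatch contract
from backtype.storm.coordination; the class and field names are hypothetical:

    import backtype.storm.coordination.BatchOutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseTransactionalBolt;
    import backtype.storm.transactional.ICommitter;
    import backtype.storm.transactional.TransactionAttempt;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.tuple.Values;

    import java.util.Map;

    // Hypothetical committer: counts tuples in the batch and emits the total in
    // finishBatch, which runs in transaction order because of ICommitter.
    public class CountCommitterBolt extends BaseTransactionalBolt implements ICommitter {
        private TransactionAttempt attempt;
        private BatchOutputCollector collector;
        private long count = 0;

        @Override
        public void prepare(Map conf, TopologyContext context,
                BatchOutputCollector collector, TransactionAttempt attempt) {
            this.collector = collector;
            this.attempt = attempt;
        }

        @Override
        public void execute(Tuple tuple) {
            count++;
        }

        @Override
        public void finishBatch() {
            // Typically state (e.g. a store keyed by txid) would be updated
            // idempotently here before emitting downstream.
            collector.emit(new Values(attempt, count));
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("tx", "count"));
        }
    }
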
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/ICommitterTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/ICommitterTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/transactional/ICommitterTransactionalSpout.java
deleted file mode 100644
index 58b8e19..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/ICommitterTransactionalSpout.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package backtype.storm.transactional;
-
-import backtype.storm.task.TopologyContext;
-import java.util.Map;
-
-public interface ICommitterTransactionalSpout<X> extends ITransactionalSpout<X> {
- public interface Emitter extends ITransactionalSpout.Emitter {
- void commit(TransactionAttempt attempt);
- }
-
- @Override
- public Emitter getEmitter(Map conf, TopologyContext context);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/ITransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/ITransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/transactional/ITransactionalSpout.java
deleted file mode 100644
index 13399e1..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/ITransactionalSpout.java
+++ /dev/null
@@ -1,88 +0,0 @@
-package backtype.storm.transactional;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IComponent;
-import java.math.BigInteger;
-import java.util.Map;
-
-public interface ITransactionalSpout<T> extends IComponent {
- public interface Coordinator<X> {
- /**
- * Create metadata for this particular transaction id which has never
- * been emitted before. The metadata should contain whatever is
- * necessary to be able to replay the exact batch for the transaction at
- * a later point.
- *
- * The metadata is stored in Zookeeper.
- *
- * Storm uses the Kryo serializations configured in the component
- * configuration for this spout to serialize and deserialize the
- * metadata.
- *
- * @param txid
- * The id of the transaction.
- * @param prevMetadata
- * The metadata of the previous transaction
- * @return the metadata for this new transaction
- */
- X initializeTransaction(BigInteger txid, X prevMetadata);
-
- /**
- * Returns true if it's ok to start a new transaction, false
- * otherwise (this transaction will be skipped).
- *
- * You should sleep here if you want a delay between asking for the next
- * transaction (this will be called repeatedly in a loop).
- */
- boolean isReady();
-
- /**
- * Release any resources from this coordinator.
- */
- void close();
- }
-
- public interface Emitter<X> {
- /**
- * Emit a batch for the specified transaction attempt and metadata for
- * the transaction. The metadata was created by the Coordinator in the
- * initializeTransaction method. This method must always emit the same
- * batch of tuples across all tasks for the same transaction id.
- *
- * The first field of all emitted tuples must contain the provided
- * TransactionAttempt.
- *
- */
- void emitBatch(TransactionAttempt tx, X coordinatorMeta,
- BatchOutputCollector collector);
-
- /**
- * Any state for transactions prior to the provided transaction id can
- * be safely cleaned up, so this method should clean up that state.
- */
- void cleanupBefore(BigInteger txid);
-
- /**
- * Release any resources held by this emitter.
- */
- void close();
- }
-
- /**
- * The coordinator for a TransactionalSpout runs in a single thread and
- * indicates when batches of tuples should be emitted and when transactions
- * should commit. The Coordinator that you provide in a TransactionalSpout
- * provides metadata for each transaction so that the transactions can be
- * replayed.
- */
- Coordinator<T> getCoordinator(Map conf, TopologyContext context);
-
- /**
- * The emitter for a TransactionalSpout runs as multiple tasks across the
- * cluster. Emitters are responsible for emitting batches of tuples for a
- * transaction and must ensure that the same batch of tuples is always
- * emitted for the same transaction id.
- */
- Emitter<T> getEmitter(Map conf, TopologyContext context);
-}
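
To make the Coordinator/Emitter split above concrete, here is a rough, non-authoritative
sketch of an in-memory implementation. The metadata type (the batch's start offset), the
batch size, and the class name are invented for illustration; only the interface methods
themselves come from this tree:

    import backtype.storm.coordination.BatchOutputCollector;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseTransactionalSpout;
    import backtype.storm.transactional.TransactionAttempt;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;

    import java.math.BigInteger;
    import java.util.Map;

    // Hypothetical spout whose metadata is simply the first sequence number of the
    // batch; replaying a transaction re-emits the same deterministic range.
    public class SequenceTransactionalSpout extends BaseTransactionalSpout<Long> {
        private static final int BATCH_SIZE = 10;

        @Override
        public Coordinator<Long> getCoordinator(Map conf, TopologyContext context) {
            return new Coordinator<Long>() {
                @Override
                public Long initializeTransaction(BigInteger txid, Long prevMetadata) {
                    // Metadata = start offset of this batch, derived from the previous one.
                    return prevMetadata == null ? 0L : prevMetadata + BATCH_SIZE;
                }

                @Override
                public boolean isReady() {
                    return true; // always ready; a real spout might throttle here
                }

                @Override
                public void close() {
                }
            };
        }

        @Override
        public Emitter<Long> getEmitter(Map conf, TopologyContext context) {
            return new Emitter<Long>() {
                @Override
                public void emitBatch(TransactionAttempt tx, Long startOffset,
                        BatchOutputCollector collector) {
                    // Same txid + same metadata => same batch, as the contract requires;
                    // the TransactionAttempt is always the first emitted field.
                    for (long i = startOffset; i < startOffset + BATCH_SIZE; i++) {
                        collector.emit(new Values(tx, i));
                    }
                }

                @Override
                public void cleanupBefore(BigInteger txid) {
                    // nothing buffered in this sketch
                }

                @Override
                public void close() {
                }
            };
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("tx", "value"));
        }
    }
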
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/TransactionAttempt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionAttempt.java b/jstorm-client/src/main/java/backtype/storm/transactional/TransactionAttempt.java
deleted file mode 100644
index 2d02de1..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionAttempt.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package backtype.storm.transactional;
-
-import java.math.BigInteger;
-
-public class TransactionAttempt {
- BigInteger _txid;
- long _attemptId;
-
- // for kryo compatibility
- public TransactionAttempt() {
-
- }
-
- public TransactionAttempt(BigInteger txid, long attemptId) {
- _txid = txid;
- _attemptId = attemptId;
- }
-
- public BigInteger getTransactionId() {
- return _txid;
- }
-
- public long getAttemptId() {
- return _attemptId;
- }
-
- @Override
- public int hashCode() {
- return _txid.hashCode();
- }
-
- @Override
- public boolean equals(Object o) {
- if (!(o instanceof TransactionAttempt))
- return false;
- TransactionAttempt other = (TransactionAttempt) o;
- return _txid.equals(other._txid) && _attemptId == other._attemptId;
- }
-
- @Override
- public String toString() {
- return "" + _txid + ":" + _attemptId;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutBatchExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutBatchExecutor.java b/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutBatchExecutor.java
deleted file mode 100644
index daea107..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutBatchExecutor.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package backtype.storm.transactional;
-
-import backtype.storm.coordination.BatchOutputCollectorImpl;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import java.math.BigInteger;
-import java.util.Map;
-import java.util.TreeMap;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TransactionalSpoutBatchExecutor implements IRichBolt {
- public static Logger LOG = LoggerFactory
- .getLogger(TransactionalSpoutBatchExecutor.class);
-
- BatchOutputCollectorImpl _collector;
- ITransactionalSpout _spout;
- ITransactionalSpout.Emitter _emitter;
-
- TreeMap<BigInteger, TransactionAttempt> _activeTransactions = new TreeMap<BigInteger, TransactionAttempt>();
-
- public TransactionalSpoutBatchExecutor(ITransactionalSpout spout) {
- _spout = spout;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- OutputCollector collector) {
- _collector = new BatchOutputCollectorImpl(collector);
- _emitter = _spout.getEmitter(conf, context);
- }
-
- @Override
- public void execute(Tuple input) {
- TransactionAttempt attempt = (TransactionAttempt) input.getValue(0);
- try {
- if (input.getSourceStreamId().equals(
- TransactionalSpoutCoordinator.TRANSACTION_COMMIT_STREAM_ID)) {
- if (attempt.equals(_activeTransactions.get(attempt
- .getTransactionId()))) {
- ((ICommitterTransactionalSpout.Emitter) _emitter)
- .commit(attempt);
- _activeTransactions.remove(attempt.getTransactionId());
- _collector.ack(input);
- } else {
- _collector.fail(input);
- }
- } else {
- _emitter.emitBatch(attempt, input.getValue(1), _collector);
- _activeTransactions.put(attempt.getTransactionId(), attempt);
- _collector.ack(input);
- BigInteger committed = (BigInteger) input.getValue(2);
- if (committed != null) {
- // valid to delete before what's been committed since
- // those batches will never be accessed again
- _activeTransactions.headMap(committed).clear();
- _emitter.cleanupBefore(committed);
- }
- }
- } catch (FailedException e) {
- LOG.warn("Failed to emit batch for transaction", e);
- _collector.fail(input);
- }
- }
-
- @Override
- public void cleanup() {
- _emitter.close();
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _spout.declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutCoordinator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutCoordinator.java b/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutCoordinator.java
deleted file mode 100644
index 4810903..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalSpoutCoordinator.java
+++ /dev/null
@@ -1,220 +0,0 @@
-package backtype.storm.transactional;
-
-import backtype.storm.Config;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.transactional.state.RotatingTransactionalState;
-import backtype.storm.transactional.state.TransactionalState;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-import java.math.BigInteger;
-import java.util.Map;
-import java.util.TreeMap;
-import java.util.Random;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TransactionalSpoutCoordinator extends BaseRichSpout {
- public static final Logger LOG = LoggerFactory
- .getLogger(TransactionalSpoutCoordinator.class);
-
- public static final BigInteger INIT_TXID = BigInteger.ONE;
-
- public static final String TRANSACTION_BATCH_STREAM_ID = TransactionalSpoutCoordinator.class
- .getName() + "/batch";
- public static final String TRANSACTION_COMMIT_STREAM_ID = TransactionalSpoutCoordinator.class
- .getName() + "/commit";
-
- private static final String CURRENT_TX = "currtx";
- private static final String META_DIR = "meta";
-
- private ITransactionalSpout _spout;
- private ITransactionalSpout.Coordinator _coordinator;
- private TransactionalState _state;
- private RotatingTransactionalState _coordinatorState;
-
- TreeMap<BigInteger, TransactionStatus> _activeTx = new TreeMap<BigInteger, TransactionStatus>();
-
- private SpoutOutputCollector _collector;
- private Random _rand;
- BigInteger _currTransaction;
- int _maxTransactionActive;
- StateInitializer _initializer;
-
- public TransactionalSpoutCoordinator(ITransactionalSpout spout) {
- _spout = spout;
- }
-
- public ITransactionalSpout getSpout() {
- return _spout;
- }
-
- @Override
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- _rand = new Random(Utils.secureRandomLong());
- _state = TransactionalState.newCoordinatorState(conf,
- (String) conf.get(Config.TOPOLOGY_TRANSACTIONAL_ID),
- _spout.getComponentConfiguration());
- _coordinatorState = new RotatingTransactionalState(_state, META_DIR,
- true);
- _collector = collector;
- _coordinator = _spout.getCoordinator(conf, context);
- _currTransaction = getStoredCurrTransaction(_state);
- Object active = conf.get(Config.TOPOLOGY_MAX_SPOUT_PENDING);
- if (active == null) {
- _maxTransactionActive = 1;
- } else {
- _maxTransactionActive = Utils.getInt(active);
- }
- _initializer = new StateInitializer();
- }
-
- @Override
- public void close() {
- _state.close();
- }
-
- @Override
- public void nextTuple() {
- sync();
- }
-
- @Override
- public void ack(Object msgId) {
- TransactionAttempt tx = (TransactionAttempt) msgId;
- TransactionStatus status = _activeTx.get(tx.getTransactionId());
- if (status != null && tx.equals(status.attempt)) {
- if (status.status == AttemptStatus.PROCESSING) {
- status.status = AttemptStatus.PROCESSED;
- } else if (status.status == AttemptStatus.COMMITTING) {
- _activeTx.remove(tx.getTransactionId());
- _coordinatorState.cleanupBefore(tx.getTransactionId());
- _currTransaction = nextTransactionId(tx.getTransactionId());
- _state.setData(CURRENT_TX, _currTransaction);
- }
- sync();
- }
- }
-
- @Override
- public void fail(Object msgId) {
- TransactionAttempt tx = (TransactionAttempt) msgId;
- TransactionStatus stored = _activeTx.remove(tx.getTransactionId());
- if (stored != null && tx.equals(stored.attempt)) {
- _activeTx.tailMap(tx.getTransactionId()).clear();
- sync();
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- // in the partitioned case, if an emitter task receives a later
- // transaction than any it has emitted so far,
- // when it sees the earlier txid it should know to emit nothing
- declarer.declareStream(TRANSACTION_BATCH_STREAM_ID, new Fields("tx",
- "tx-meta", "committed-txid"));
- declarer.declareStream(TRANSACTION_COMMIT_STREAM_ID, new Fields("tx"));
- }
-
- private void sync() {
- // note that sometimes the number of active transactions may be less than
- // max_spout_pending, e.g.
- // max_spout_pending = 3
- // tx 1, 2, 3 active, tx 2 is acked. there won't be a commit for tx 2
- // (because tx 1 isn't committed yet),
- // and there won't be a batch for tx 4 because max_spout_pending
- // transactions are already active
- TransactionStatus maybeCommit = _activeTx.get(_currTransaction);
- if (maybeCommit != null
- && maybeCommit.status == AttemptStatus.PROCESSED) {
- maybeCommit.status = AttemptStatus.COMMITTING;
- _collector.emit(TRANSACTION_COMMIT_STREAM_ID, new Values(
- maybeCommit.attempt), maybeCommit.attempt);
- }
-
- try {
- if (_activeTx.size() < _maxTransactionActive) {
- BigInteger curr = _currTransaction;
- for (int i = 0; i < _maxTransactionActive; i++) {
- if ((_coordinatorState.hasCache(curr) || _coordinator
- .isReady()) && !_activeTx.containsKey(curr)) {
- TransactionAttempt attempt = new TransactionAttempt(
- curr, _rand.nextLong());
- Object state = _coordinatorState.getState(curr,
- _initializer);
- _activeTx.put(curr, new TransactionStatus(attempt));
- _collector
- .emit(TRANSACTION_BATCH_STREAM_ID,
- new Values(
- attempt,
- state,
- previousTransactionId(_currTransaction)),
- attempt);
- }
- curr = nextTransactionId(curr);
- }
- }
- } catch (FailedException e) {
- LOG.warn("Failed to get metadata for a transaction", e);
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Config ret = new Config();
- ret.setMaxTaskParallelism(1);
- return ret;
- }
-
- private static enum AttemptStatus {
- PROCESSING, PROCESSED, COMMITTING
- }
-
- private static class TransactionStatus {
- TransactionAttempt attempt;
- AttemptStatus status;
-
- public TransactionStatus(TransactionAttempt attempt) {
- this.attempt = attempt;
- this.status = AttemptStatus.PROCESSING;
- }
-
- @Override
- public String toString() {
- return attempt.toString() + " <" + status.toString() + ">";
- }
- }
-
- private BigInteger nextTransactionId(BigInteger id) {
- return id.add(BigInteger.ONE);
- }
-
- private BigInteger previousTransactionId(BigInteger id) {
- if (id.equals(INIT_TXID)) {
- return null;
- } else {
- return id.subtract(BigInteger.ONE);
- }
- }
-
- private BigInteger getStoredCurrTransaction(TransactionalState state) {
- BigInteger ret = (BigInteger) state.getData(CURRENT_TX);
- if (ret == null)
- return INIT_TXID;
- else
- return ret;
- }
-
- private class StateInitializer implements
- RotatingTransactionalState.StateInitializer {
- @Override
- public Object init(BigInteger txid, Object lastState) {
- return _coordinator.initializeTransaction(txid, lastState);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/BoltStats.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/BoltStats.java b/jstorm-client/src/main/java/backtype/storm/generated/BoltStats.java
deleted file mode 100644
index 31dced0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/BoltStats.java
+++ /dev/null
@@ -1,1111 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class BoltStats implements org.apache.thrift7.TBase<BoltStats, BoltStats._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("BoltStats");
-
- private static final org.apache.thrift7.protocol.TField ACKED_FIELD_DESC = new org.apache.thrift7.protocol.TField("acked", org.apache.thrift7.protocol.TType.MAP, (short)1);
- private static final org.apache.thrift7.protocol.TField FAILED_FIELD_DESC = new org.apache.thrift7.protocol.TField("failed", org.apache.thrift7.protocol.TType.MAP, (short)2);
- private static final org.apache.thrift7.protocol.TField PROCESS_MS_AVG_FIELD_DESC = new org.apache.thrift7.protocol.TField("process_ms_avg", org.apache.thrift7.protocol.TType.MAP, (short)3);
- private static final org.apache.thrift7.protocol.TField EXECUTED_FIELD_DESC = new org.apache.thrift7.protocol.TField("executed", org.apache.thrift7.protocol.TType.MAP, (short)4);
- private static final org.apache.thrift7.protocol.TField EXECUTE_MS_AVG_FIELD_DESC = new org.apache.thrift7.protocol.TField("execute_ms_avg", org.apache.thrift7.protocol.TType.MAP, (short)5);
-
- private Map<String,Map<GlobalStreamId,Long>> acked; // required
- private Map<String,Map<GlobalStreamId,Long>> failed; // required
- private Map<String,Map<GlobalStreamId,Double>> process_ms_avg; // required
- private Map<String,Map<GlobalStreamId,Long>> executed; // required
- private Map<String,Map<GlobalStreamId,Double>> execute_ms_avg; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ACKED((short)1, "acked"),
- FAILED((short)2, "failed"),
- PROCESS_MS_AVG((short)3, "process_ms_avg"),
- EXECUTED((short)4, "executed"),
- EXECUTE_MS_AVG((short)5, "execute_ms_avg");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if it's not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ACKED
- return ACKED;
- case 2: // FAILED
- return FAILED;
- case 3: // PROCESS_MS_AVG
- return PROCESS_MS_AVG;
- case 4: // EXECUTED
- return EXECUTED;
- case 5: // EXECUTE_MS_AVG
- return EXECUTE_MS_AVG;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if it's not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ACKED, new org.apache.thrift7.meta_data.FieldMetaData("acked", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.FAILED, new org.apache.thrift7.meta_data.FieldMetaData("failed", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.PROCESS_MS_AVG, new org.apache.thrift7.meta_data.FieldMetaData("process_ms_avg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)))));
- tmpMap.put(_Fields.EXECUTED, new org.apache.thrift7.meta_data.FieldMetaData("executed", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I64)))));
- tmpMap.put(_Fields.EXECUTE_MS_AVG, new org.apache.thrift7.meta_data.FieldMetaData("execute_ms_avg", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, GlobalStreamId.class),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(BoltStats.class, metaDataMap);
- }
-
- public BoltStats() {
- }
-
- public BoltStats(
- Map<String,Map<GlobalStreamId,Long>> acked,
- Map<String,Map<GlobalStreamId,Long>> failed,
- Map<String,Map<GlobalStreamId,Double>> process_ms_avg,
- Map<String,Map<GlobalStreamId,Long>> executed,
- Map<String,Map<GlobalStreamId,Double>> execute_ms_avg)
- {
- this();
- this.acked = acked;
- this.failed = failed;
- this.process_ms_avg = process_ms_avg;
- this.executed = executed;
- this.execute_ms_avg = execute_ms_avg;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public BoltStats(BoltStats other) {
- if (other.is_set_acked()) {
- Map<String,Map<GlobalStreamId,Long>> __this__acked = new HashMap<String,Map<GlobalStreamId,Long>>();
- for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.acked.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Long> other_element_value = other_element.getValue();
-
- String __this__acked_copy_key = other_element_key;
-
- Map<GlobalStreamId,Long> __this__acked_copy_value = new HashMap<GlobalStreamId,Long>();
- for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__acked_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Long __this__acked_copy_value_copy_value = other_element_value_element_value;
-
- __this__acked_copy_value.put(__this__acked_copy_value_copy_key, __this__acked_copy_value_copy_value);
- }
-
- __this__acked.put(__this__acked_copy_key, __this__acked_copy_value);
- }
- this.acked = __this__acked;
- }
- if (other.is_set_failed()) {
- Map<String,Map<GlobalStreamId,Long>> __this__failed = new HashMap<String,Map<GlobalStreamId,Long>>();
- for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.failed.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Long> other_element_value = other_element.getValue();
-
- String __this__failed_copy_key = other_element_key;
-
- Map<GlobalStreamId,Long> __this__failed_copy_value = new HashMap<GlobalStreamId,Long>();
- for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__failed_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Long __this__failed_copy_value_copy_value = other_element_value_element_value;
-
- __this__failed_copy_value.put(__this__failed_copy_value_copy_key, __this__failed_copy_value_copy_value);
- }
-
- __this__failed.put(__this__failed_copy_key, __this__failed_copy_value);
- }
- this.failed = __this__failed;
- }
- if (other.is_set_process_ms_avg()) {
- Map<String,Map<GlobalStreamId,Double>> __this__process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>();
- for (Map.Entry<String, Map<GlobalStreamId,Double>> other_element : other.process_ms_avg.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Double> other_element_value = other_element.getValue();
-
- String __this__process_ms_avg_copy_key = other_element_key;
-
- Map<GlobalStreamId,Double> __this__process_ms_avg_copy_value = new HashMap<GlobalStreamId,Double>();
- for (Map.Entry<GlobalStreamId, Double> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Double other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__process_ms_avg_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Double __this__process_ms_avg_copy_value_copy_value = other_element_value_element_value;
-
- __this__process_ms_avg_copy_value.put(__this__process_ms_avg_copy_value_copy_key, __this__process_ms_avg_copy_value_copy_value);
- }
-
- __this__process_ms_avg.put(__this__process_ms_avg_copy_key, __this__process_ms_avg_copy_value);
- }
- this.process_ms_avg = __this__process_ms_avg;
- }
- if (other.is_set_executed()) {
- Map<String,Map<GlobalStreamId,Long>> __this__executed = new HashMap<String,Map<GlobalStreamId,Long>>();
- for (Map.Entry<String, Map<GlobalStreamId,Long>> other_element : other.executed.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Long> other_element_value = other_element.getValue();
-
- String __this__executed_copy_key = other_element_key;
-
- Map<GlobalStreamId,Long> __this__executed_copy_value = new HashMap<GlobalStreamId,Long>();
- for (Map.Entry<GlobalStreamId, Long> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Long other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__executed_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Long __this__executed_copy_value_copy_value = other_element_value_element_value;
-
- __this__executed_copy_value.put(__this__executed_copy_value_copy_key, __this__executed_copy_value_copy_value);
- }
-
- __this__executed.put(__this__executed_copy_key, __this__executed_copy_value);
- }
- this.executed = __this__executed;
- }
- if (other.is_set_execute_ms_avg()) {
- Map<String,Map<GlobalStreamId,Double>> __this__execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>();
- for (Map.Entry<String, Map<GlobalStreamId,Double>> other_element : other.execute_ms_avg.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Map<GlobalStreamId,Double> other_element_value = other_element.getValue();
-
- String __this__execute_ms_avg_copy_key = other_element_key;
-
- Map<GlobalStreamId,Double> __this__execute_ms_avg_copy_value = new HashMap<GlobalStreamId,Double>();
- for (Map.Entry<GlobalStreamId, Double> other_element_value_element : other_element_value.entrySet()) {
-
- GlobalStreamId other_element_value_element_key = other_element_value_element.getKey();
- Double other_element_value_element_value = other_element_value_element.getValue();
-
- GlobalStreamId __this__execute_ms_avg_copy_value_copy_key = new GlobalStreamId(other_element_value_element_key);
-
- Double __this__execute_ms_avg_copy_value_copy_value = other_element_value_element_value;
-
- __this__execute_ms_avg_copy_value.put(__this__execute_ms_avg_copy_value_copy_key, __this__execute_ms_avg_copy_value_copy_value);
- }
-
- __this__execute_ms_avg.put(__this__execute_ms_avg_copy_key, __this__execute_ms_avg_copy_value);
- }
- this.execute_ms_avg = __this__execute_ms_avg;
- }
- }
-
- public BoltStats deepCopy() {
- return new BoltStats(this);
- }
-
- @Override
- public void clear() {
- this.acked = null;
- this.failed = null;
- this.process_ms_avg = null;
- this.executed = null;
- this.execute_ms_avg = null;
- }
-
- public int get_acked_size() {
- return (this.acked == null) ? 0 : this.acked.size();
- }
-
- public void put_to_acked(String key, Map<GlobalStreamId,Long> val) {
- if (this.acked == null) {
- this.acked = new HashMap<String,Map<GlobalStreamId,Long>>();
- }
- this.acked.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Long>> get_acked() {
- return this.acked;
- }
-
- public void set_acked(Map<String,Map<GlobalStreamId,Long>> acked) {
- this.acked = acked;
- }
-
- public void unset_acked() {
- this.acked = null;
- }
-
- /** Returns true if field acked is set (has been assigned a value) and false otherwise */
- public boolean is_set_acked() {
- return this.acked != null;
- }
-
- public void set_acked_isSet(boolean value) {
- if (!value) {
- this.acked = null;
- }
- }
-
- public int get_failed_size() {
- return (this.failed == null) ? 0 : this.failed.size();
- }
-
- public void put_to_failed(String key, Map<GlobalStreamId,Long> val) {
- if (this.failed == null) {
- this.failed = new HashMap<String,Map<GlobalStreamId,Long>>();
- }
- this.failed.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Long>> get_failed() {
- return this.failed;
- }
-
- public void set_failed(Map<String,Map<GlobalStreamId,Long>> failed) {
- this.failed = failed;
- }
-
- public void unset_failed() {
- this.failed = null;
- }
-
- /** Returns true if field failed is set (has been assigned a value) and false otherwise */
- public boolean is_set_failed() {
- return this.failed != null;
- }
-
- public void set_failed_isSet(boolean value) {
- if (!value) {
- this.failed = null;
- }
- }
-
- public int get_process_ms_avg_size() {
- return (this.process_ms_avg == null) ? 0 : this.process_ms_avg.size();
- }
-
- public void put_to_process_ms_avg(String key, Map<GlobalStreamId,Double> val) {
- if (this.process_ms_avg == null) {
- this.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>();
- }
- this.process_ms_avg.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Double>> get_process_ms_avg() {
- return this.process_ms_avg;
- }
-
- public void set_process_ms_avg(Map<String,Map<GlobalStreamId,Double>> process_ms_avg) {
- this.process_ms_avg = process_ms_avg;
- }
-
- public void unset_process_ms_avg() {
- this.process_ms_avg = null;
- }
-
- /** Returns true if field process_ms_avg is set (has been assigned a value) and false otherwise */
- public boolean is_set_process_ms_avg() {
- return this.process_ms_avg != null;
- }
-
- public void set_process_ms_avg_isSet(boolean value) {
- if (!value) {
- this.process_ms_avg = null;
- }
- }
-
- public int get_executed_size() {
- return (this.executed == null) ? 0 : this.executed.size();
- }
-
- public void put_to_executed(String key, Map<GlobalStreamId,Long> val) {
- if (this.executed == null) {
- this.executed = new HashMap<String,Map<GlobalStreamId,Long>>();
- }
- this.executed.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Long>> get_executed() {
- return this.executed;
- }
-
- public void set_executed(Map<String,Map<GlobalStreamId,Long>> executed) {
- this.executed = executed;
- }
-
- public void unset_executed() {
- this.executed = null;
- }
-
- /** Returns true if field executed is set (has been assigned a value) and false otherwise */
- public boolean is_set_executed() {
- return this.executed != null;
- }
-
- public void set_executed_isSet(boolean value) {
- if (!value) {
- this.executed = null;
- }
- }
-
- public int get_execute_ms_avg_size() {
- return (this.execute_ms_avg == null) ? 0 : this.execute_ms_avg.size();
- }
-
- public void put_to_execute_ms_avg(String key, Map<GlobalStreamId,Double> val) {
- if (this.execute_ms_avg == null) {
- this.execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>();
- }
- this.execute_ms_avg.put(key, val);
- }
-
- public Map<String,Map<GlobalStreamId,Double>> get_execute_ms_avg() {
- return this.execute_ms_avg;
- }
-
- public void set_execute_ms_avg(Map<String,Map<GlobalStreamId,Double>> execute_ms_avg) {
- this.execute_ms_avg = execute_ms_avg;
- }
-
- public void unset_execute_ms_avg() {
- this.execute_ms_avg = null;
- }
-
- /** Returns true if field execute_ms_avg is set (has been assigned a value) and false otherwise */
- public boolean is_set_execute_ms_avg() {
- return this.execute_ms_avg != null;
- }
-
- public void set_execute_ms_avg_isSet(boolean value) {
- if (!value) {
- this.execute_ms_avg = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ACKED:
- if (value == null) {
- unset_acked();
- } else {
- set_acked((Map<String,Map<GlobalStreamId,Long>>)value);
- }
- break;
-
- case FAILED:
- if (value == null) {
- unset_failed();
- } else {
- set_failed((Map<String,Map<GlobalStreamId,Long>>)value);
- }
- break;
-
- case PROCESS_MS_AVG:
- if (value == null) {
- unset_process_ms_avg();
- } else {
- set_process_ms_avg((Map<String,Map<GlobalStreamId,Double>>)value);
- }
- break;
-
- case EXECUTED:
- if (value == null) {
- unset_executed();
- } else {
- set_executed((Map<String,Map<GlobalStreamId,Long>>)value);
- }
- break;
-
- case EXECUTE_MS_AVG:
- if (value == null) {
- unset_execute_ms_avg();
- } else {
- set_execute_ms_avg((Map<String,Map<GlobalStreamId,Double>>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ACKED:
- return get_acked();
-
- case FAILED:
- return get_failed();
-
- case PROCESS_MS_AVG:
- return get_process_ms_avg();
-
- case EXECUTED:
- return get_executed();
-
- case EXECUTE_MS_AVG:
- return get_execute_ms_avg();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ACKED:
- return is_set_acked();
- case FAILED:
- return is_set_failed();
- case PROCESS_MS_AVG:
- return is_set_process_ms_avg();
- case EXECUTED:
- return is_set_executed();
- case EXECUTE_MS_AVG:
- return is_set_execute_ms_avg();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof BoltStats)
- return this.equals((BoltStats)that);
- return false;
- }
-
- public boolean equals(BoltStats that) {
- if (that == null)
- return false;
-
- boolean this_present_acked = true && this.is_set_acked();
- boolean that_present_acked = true && that.is_set_acked();
- if (this_present_acked || that_present_acked) {
- if (!(this_present_acked && that_present_acked))
- return false;
- if (!this.acked.equals(that.acked))
- return false;
- }
-
- boolean this_present_failed = true && this.is_set_failed();
- boolean that_present_failed = true && that.is_set_failed();
- if (this_present_failed || that_present_failed) {
- if (!(this_present_failed && that_present_failed))
- return false;
- if (!this.failed.equals(that.failed))
- return false;
- }
-
- boolean this_present_process_ms_avg = true && this.is_set_process_ms_avg();
- boolean that_present_process_ms_avg = true && that.is_set_process_ms_avg();
- if (this_present_process_ms_avg || that_present_process_ms_avg) {
- if (!(this_present_process_ms_avg && that_present_process_ms_avg))
- return false;
- if (!this.process_ms_avg.equals(that.process_ms_avg))
- return false;
- }
-
- boolean this_present_executed = true && this.is_set_executed();
- boolean that_present_executed = true && that.is_set_executed();
- if (this_present_executed || that_present_executed) {
- if (!(this_present_executed && that_present_executed))
- return false;
- if (!this.executed.equals(that.executed))
- return false;
- }
-
- boolean this_present_execute_ms_avg = true && this.is_set_execute_ms_avg();
- boolean that_present_execute_ms_avg = true && that.is_set_execute_ms_avg();
- if (this_present_execute_ms_avg || that_present_execute_ms_avg) {
- if (!(this_present_execute_ms_avg && that_present_execute_ms_avg))
- return false;
- if (!this.execute_ms_avg.equals(that.execute_ms_avg))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_acked = true && (is_set_acked());
- builder.append(present_acked);
- if (present_acked)
- builder.append(acked);
-
- boolean present_failed = true && (is_set_failed());
- builder.append(present_failed);
- if (present_failed)
- builder.append(failed);
-
- boolean present_process_ms_avg = true && (is_set_process_ms_avg());
- builder.append(present_process_ms_avg);
- if (present_process_ms_avg)
- builder.append(process_ms_avg);
-
- boolean present_executed = true && (is_set_executed());
- builder.append(present_executed);
- if (present_executed)
- builder.append(executed);
-
- boolean present_execute_ms_avg = true && (is_set_execute_ms_avg());
- builder.append(present_execute_ms_avg);
- if (present_execute_ms_avg)
- builder.append(execute_ms_avg);
-
- return builder.toHashCode();
- }
-
- public int compareTo(BoltStats other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- BoltStats typedOther = (BoltStats)other;
-
- lastComparison = Boolean.valueOf(is_set_acked()).compareTo(typedOther.is_set_acked());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_acked()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.acked, typedOther.acked);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_failed()).compareTo(typedOther.is_set_failed());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_failed()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.failed, typedOther.failed);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_process_ms_avg()).compareTo(typedOther.is_set_process_ms_avg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_process_ms_avg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.process_ms_avg, typedOther.process_ms_avg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_executed()).compareTo(typedOther.is_set_executed());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_executed()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.executed, typedOther.executed);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_execute_ms_avg()).compareTo(typedOther.is_set_execute_ms_avg());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_execute_ms_avg()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.execute_ms_avg, typedOther.execute_ms_avg);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ACKED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map45 = iprot.readMapBegin();
- this.acked = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map45.size);
- for (int _i46 = 0; _i46 < _map45.size; ++_i46)
- {
- String _key47; // required
- Map<GlobalStreamId,Long> _val48; // required
- _key47 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map49 = iprot.readMapBegin();
- _val48 = new HashMap<GlobalStreamId,Long>(2*_map49.size);
- for (int _i50 = 0; _i50 < _map49.size; ++_i50)
- {
- GlobalStreamId _key51; // required
- long _val52; // required
- _key51 = new GlobalStreamId();
- _key51.read(iprot);
- _val52 = iprot.readI64();
- _val48.put(_key51, _val52);
- }
- iprot.readMapEnd();
- }
- this.acked.put(_key47, _val48);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // FAILED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map53 = iprot.readMapBegin();
- this.failed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map53.size);
- for (int _i54 = 0; _i54 < _map53.size; ++_i54)
- {
- String _key55; // required
- Map<GlobalStreamId,Long> _val56; // required
- _key55 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map57 = iprot.readMapBegin();
- _val56 = new HashMap<GlobalStreamId,Long>(2*_map57.size);
- for (int _i58 = 0; _i58 < _map57.size; ++_i58)
- {
- GlobalStreamId _key59; // required
- long _val60; // required
- _key59 = new GlobalStreamId();
- _key59.read(iprot);
- _val60 = iprot.readI64();
- _val56.put(_key59, _val60);
- }
- iprot.readMapEnd();
- }
- this.failed.put(_key55, _val56);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // PROCESS_MS_AVG
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map61 = iprot.readMapBegin();
- this.process_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map61.size);
- for (int _i62 = 0; _i62 < _map61.size; ++_i62)
- {
- String _key63; // required
- Map<GlobalStreamId,Double> _val64; // required
- _key63 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map65 = iprot.readMapBegin();
- _val64 = new HashMap<GlobalStreamId,Double>(2*_map65.size);
- for (int _i66 = 0; _i66 < _map65.size; ++_i66)
- {
- GlobalStreamId _key67; // required
- double _val68; // required
- _key67 = new GlobalStreamId();
- _key67.read(iprot);
- _val68 = iprot.readDouble();
- _val64.put(_key67, _val68);
- }
- iprot.readMapEnd();
- }
- this.process_ms_avg.put(_key63, _val64);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // EXECUTED
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map69 = iprot.readMapBegin();
- this.executed = new HashMap<String,Map<GlobalStreamId,Long>>(2*_map69.size);
- for (int _i70 = 0; _i70 < _map69.size; ++_i70)
- {
- String _key71; // required
- Map<GlobalStreamId,Long> _val72; // required
- _key71 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map73 = iprot.readMapBegin();
- _val72 = new HashMap<GlobalStreamId,Long>(2*_map73.size);
- for (int _i74 = 0; _i74 < _map73.size; ++_i74)
- {
- GlobalStreamId _key75; // required
- long _val76; // required
- _key75 = new GlobalStreamId();
- _key75.read(iprot);
- _val76 = iprot.readI64();
- _val72.put(_key75, _val76);
- }
- iprot.readMapEnd();
- }
- this.executed.put(_key71, _val72);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // EXECUTE_MS_AVG
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map77 = iprot.readMapBegin();
- this.execute_ms_avg = new HashMap<String,Map<GlobalStreamId,Double>>(2*_map77.size);
- for (int _i78 = 0; _i78 < _map77.size; ++_i78)
- {
- String _key79; // required
- Map<GlobalStreamId,Double> _val80; // required
- _key79 = iprot.readString();
- {
- org.apache.thrift7.protocol.TMap _map81 = iprot.readMapBegin();
- _val80 = new HashMap<GlobalStreamId,Double>(2*_map81.size);
- for (int _i82 = 0; _i82 < _map81.size; ++_i82)
- {
- GlobalStreamId _key83; // required
- double _val84; // required
- _key83 = new GlobalStreamId();
- _key83.read(iprot);
- _val84 = iprot.readDouble();
- _val80.put(_key83, _val84);
- }
- iprot.readMapEnd();
- }
- this.execute_ms_avg.put(_key79, _val80);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.acked != null) {
- oprot.writeFieldBegin(ACKED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.acked.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter85 : this.acked.entrySet())
- {
- oprot.writeString(_iter85.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.I64, _iter85.getValue().size()));
- for (Map.Entry<GlobalStreamId, Long> _iter86 : _iter85.getValue().entrySet())
- {
- _iter86.getKey().write(oprot);
- oprot.writeI64(_iter86.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.failed != null) {
- oprot.writeFieldBegin(FAILED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.failed.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter87 : this.failed.entrySet())
- {
- oprot.writeString(_iter87.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.I64, _iter87.getValue().size()));
- for (Map.Entry<GlobalStreamId, Long> _iter88 : _iter87.getValue().entrySet())
- {
- _iter88.getKey().write(oprot);
- oprot.writeI64(_iter88.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.process_ms_avg != null) {
- oprot.writeFieldBegin(PROCESS_MS_AVG_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.process_ms_avg.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter89 : this.process_ms_avg.entrySet())
- {
- oprot.writeString(_iter89.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.DOUBLE, _iter89.getValue().size()));
- for (Map.Entry<GlobalStreamId, Double> _iter90 : _iter89.getValue().entrySet())
- {
- _iter90.getKey().write(oprot);
- oprot.writeDouble(_iter90.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.executed != null) {
- oprot.writeFieldBegin(EXECUTED_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.executed.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Long>> _iter91 : this.executed.entrySet())
- {
- oprot.writeString(_iter91.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.I64, _iter91.getValue().size()));
- for (Map.Entry<GlobalStreamId, Long> _iter92 : _iter91.getValue().entrySet())
- {
- _iter92.getKey().write(oprot);
- oprot.writeI64(_iter92.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.execute_ms_avg != null) {
- oprot.writeFieldBegin(EXECUTE_MS_AVG_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.MAP, this.execute_ms_avg.size()));
- for (Map.Entry<String, Map<GlobalStreamId,Double>> _iter93 : this.execute_ms_avg.entrySet())
- {
- oprot.writeString(_iter93.getKey());
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRUCT, org.apache.thrift7.protocol.TType.DOUBLE, _iter93.getValue().size()));
- for (Map.Entry<GlobalStreamId, Double> _iter94 : _iter93.getValue().entrySet())
- {
- _iter94.getKey().write(oprot);
- oprot.writeDouble(_iter94.getValue());
- }
- oprot.writeMapEnd();
- }
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("BoltStats(");
- boolean first = true;
-
- sb.append("acked:");
- if (this.acked == null) {
- sb.append("null");
- } else {
- sb.append(this.acked);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("failed:");
- if (this.failed == null) {
- sb.append("null");
- } else {
- sb.append(this.failed);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("process_ms_avg:");
- if (this.process_ms_avg == null) {
- sb.append("null");
- } else {
- sb.append(this.process_ms_avg);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("executed:");
- if (this.executed == null) {
- sb.append("null");
- } else {
- sb.append(this.executed);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("execute_ms_avg:");
- if (this.execute_ms_avg == null) {
- sb.append("null");
- } else {
- sb.append(this.execute_ms_avg);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_acked()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'acked' is unset! Struct:" + toString());
- }
-
- if (!is_set_failed()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'failed' is unset! Struct:" + toString());
- }
-
- if (!is_set_process_ms_avg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'process_ms_avg' is unset! Struct:" + toString());
- }
-
- if (!is_set_executed()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'executed' is unset! Struct:" + toString());
- }
-
- if (!is_set_execute_ms_avg()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'execute_ms_avg' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/ClusterSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/ClusterSummary.java b/jstorm-client/src/main/java/backtype/storm/generated/ClusterSummary.java
deleted file mode 100644
index 9e42140..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/ClusterSummary.java
+++ /dev/null
@@ -1,693 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ClusterSummary implements org.apache.thrift7.TBase<ClusterSummary, ClusterSummary._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("ClusterSummary");
-
- private static final org.apache.thrift7.protocol.TField SUPERVISORS_FIELD_DESC = new org.apache.thrift7.protocol.TField("supervisors", org.apache.thrift7.protocol.TType.LIST, (short)1);
- private static final org.apache.thrift7.protocol.TField NIMBUS_UPTIME_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("nimbus_uptime_secs", org.apache.thrift7.protocol.TType.I32, (short)2);
- private static final org.apache.thrift7.protocol.TField TOPOLOGIES_FIELD_DESC = new org.apache.thrift7.protocol.TField("topologies", org.apache.thrift7.protocol.TType.LIST, (short)3);
- private static final org.apache.thrift7.protocol.TField VERSION_FIELD_DESC = new org.apache.thrift7.protocol.TField("version", org.apache.thrift7.protocol.TType.STRING, (short)4);
-
- private List<SupervisorSummary> supervisors; // required
- private int nimbus_uptime_secs; // required
- private List<TopologySummary> topologies; // required
- private String version; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SUPERVISORS((short)1, "supervisors"),
- NIMBUS_UPTIME_SECS((short)2, "nimbus_uptime_secs"),
- TOPOLOGIES((short)3, "topologies"),
- VERSION((short)4, "version");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // SUPERVISORS
- return SUPERVISORS;
- case 2: // NIMBUS_UPTIME_SECS
- return NIMBUS_UPTIME_SECS;
- case 3: // TOPOLOGIES
- return TOPOLOGIES;
- case 4: // VERSION
- return VERSION;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __NIMBUS_UPTIME_SECS_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SUPERVISORS, new org.apache.thrift7.meta_data.FieldMetaData("supervisors", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, SupervisorSummary.class))));
- tmpMap.put(_Fields.NIMBUS_UPTIME_SECS, new org.apache.thrift7.meta_data.FieldMetaData("nimbus_uptime_secs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.TOPOLOGIES, new org.apache.thrift7.meta_data.FieldMetaData("topologies", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, TopologySummary.class))));
- tmpMap.put(_Fields.VERSION, new org.apache.thrift7.meta_data.FieldMetaData("version", org.apache.thrift7.TFieldRequirementType.OPTIONAL,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(ClusterSummary.class, metaDataMap);
- }
-
- public ClusterSummary() {
- }
-
- public ClusterSummary(
- List<SupervisorSummary> supervisors,
- int nimbus_uptime_secs,
- List<TopologySummary> topologies)
- {
- this();
- this.supervisors = supervisors;
- this.nimbus_uptime_secs = nimbus_uptime_secs;
- set_nimbus_uptime_secs_isSet(true);
- this.topologies = topologies;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public ClusterSummary(ClusterSummary other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_supervisors()) {
- List<SupervisorSummary> __this__supervisors = new ArrayList<SupervisorSummary>();
- for (SupervisorSummary other_element : other.supervisors) {
- __this__supervisors.add(new SupervisorSummary(other_element));
- }
- this.supervisors = __this__supervisors;
- }
- this.nimbus_uptime_secs = other.nimbus_uptime_secs;
- if (other.is_set_topologies()) {
- List<TopologySummary> __this__topologies = new ArrayList<TopologySummary>();
- for (TopologySummary other_element : other.topologies) {
- __this__topologies.add(new TopologySummary(other_element));
- }
- this.topologies = __this__topologies;
- }
- if (other.is_set_version()) {
- this.version = other.version;
- }
- }
-
- public ClusterSummary deepCopy() {
- return new ClusterSummary(this);
- }
-
- @Override
- public void clear() {
- this.supervisors = null;
- set_nimbus_uptime_secs_isSet(false);
- this.nimbus_uptime_secs = 0;
- this.topologies = null;
- this.version = null;
- }
-
- public int get_supervisors_size() {
- return (this.supervisors == null) ? 0 : this.supervisors.size();
- }
-
- public java.util.Iterator<SupervisorSummary> get_supervisors_iterator() {
- return (this.supervisors == null) ? null : this.supervisors.iterator();
- }
-
- public void add_to_supervisors(SupervisorSummary elem) {
- if (this.supervisors == null) {
- this.supervisors = new ArrayList<SupervisorSummary>();
- }
- this.supervisors.add(elem);
- }
-
- public List<SupervisorSummary> get_supervisors() {
- return this.supervisors;
- }
-
- public void set_supervisors(List<SupervisorSummary> supervisors) {
- this.supervisors = supervisors;
- }
-
- public void unset_supervisors() {
- this.supervisors = null;
- }
-
- /** Returns true if field supervisors is set (has been assigned a value) and false otherwise */
- public boolean is_set_supervisors() {
- return this.supervisors != null;
- }
-
- public void set_supervisors_isSet(boolean value) {
- if (!value) {
- this.supervisors = null;
- }
- }
-
- public int get_nimbus_uptime_secs() {
- return this.nimbus_uptime_secs;
- }
-
- public void set_nimbus_uptime_secs(int nimbus_uptime_secs) {
- this.nimbus_uptime_secs = nimbus_uptime_secs;
- set_nimbus_uptime_secs_isSet(true);
- }
-
- public void unset_nimbus_uptime_secs() {
- __isset_bit_vector.clear(__NIMBUS_UPTIME_SECS_ISSET_ID);
- }
-
- /** Returns true if field nimbus_uptime_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_nimbus_uptime_secs() {
- return __isset_bit_vector.get(__NIMBUS_UPTIME_SECS_ISSET_ID);
- }
-
- public void set_nimbus_uptime_secs_isSet(boolean value) {
- __isset_bit_vector.set(__NIMBUS_UPTIME_SECS_ISSET_ID, value);
- }
-
- public int get_topologies_size() {
- return (this.topologies == null) ? 0 : this.topologies.size();
- }
-
- public java.util.Iterator<TopologySummary> get_topologies_iterator() {
- return (this.topologies == null) ? null : this.topologies.iterator();
- }
-
- public void add_to_topologies(TopologySummary elem) {
- if (this.topologies == null) {
- this.topologies = new ArrayList<TopologySummary>();
- }
- this.topologies.add(elem);
- }
-
- public List<TopologySummary> get_topologies() {
- return this.topologies;
- }
-
- public void set_topologies(List<TopologySummary> topologies) {
- this.topologies = topologies;
- }
-
- public void unset_topologies() {
- this.topologies = null;
- }
-
- /** Returns true if field topologies is set (has been assigned a value) and false otherwise */
- public boolean is_set_topologies() {
- return this.topologies != null;
- }
-
- public void set_topologies_isSet(boolean value) {
- if (!value) {
- this.topologies = null;
- }
- }
-
- public String get_version() {
- return this.version;
- }
-
- public void set_version(String version) {
- this.version = version;
- }
-
- public void unset_version() {
- this.version = null;
- }
-
- /** Returns true if field version is set (has been assigned a value) and false otherwise */
- public boolean is_set_version() {
- return this.version != null;
- }
-
- public void set_version_isSet(boolean value) {
- if (!value) {
- this.version = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SUPERVISORS:
- if (value == null) {
- unset_supervisors();
- } else {
- set_supervisors((List<SupervisorSummary>)value);
- }
- break;
-
- case NIMBUS_UPTIME_SECS:
- if (value == null) {
- unset_nimbus_uptime_secs();
- } else {
- set_nimbus_uptime_secs((Integer)value);
- }
- break;
-
- case TOPOLOGIES:
- if (value == null) {
- unset_topologies();
- } else {
- set_topologies((List<TopologySummary>)value);
- }
- break;
-
- case VERSION:
- if (value == null) {
- unset_version();
- } else {
- set_version((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SUPERVISORS:
- return get_supervisors();
-
- case NIMBUS_UPTIME_SECS:
- return Integer.valueOf(get_nimbus_uptime_secs());
-
- case TOPOLOGIES:
- return get_topologies();
-
- case VERSION:
- return get_version();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SUPERVISORS:
- return is_set_supervisors();
- case NIMBUS_UPTIME_SECS:
- return is_set_nimbus_uptime_secs();
- case TOPOLOGIES:
- return is_set_topologies();
- case VERSION:
- return is_set_version();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof ClusterSummary)
- return this.equals((ClusterSummary)that);
- return false;
- }
-
- public boolean equals(ClusterSummary that) {
- if (that == null)
- return false;
-
- boolean this_present_supervisors = true && this.is_set_supervisors();
- boolean that_present_supervisors = true && that.is_set_supervisors();
- if (this_present_supervisors || that_present_supervisors) {
- if (!(this_present_supervisors && that_present_supervisors))
- return false;
- if (!this.supervisors.equals(that.supervisors))
- return false;
- }
-
- boolean this_present_nimbus_uptime_secs = true;
- boolean that_present_nimbus_uptime_secs = true;
- if (this_present_nimbus_uptime_secs || that_present_nimbus_uptime_secs) {
- if (!(this_present_nimbus_uptime_secs && that_present_nimbus_uptime_secs))
- return false;
- if (this.nimbus_uptime_secs != that.nimbus_uptime_secs)
- return false;
- }
-
- boolean this_present_topologies = true && this.is_set_topologies();
- boolean that_present_topologies = true && that.is_set_topologies();
- if (this_present_topologies || that_present_topologies) {
- if (!(this_present_topologies && that_present_topologies))
- return false;
- if (!this.topologies.equals(that.topologies))
- return false;
- }
-
- boolean this_present_version = true && this.is_set_version();
- boolean that_present_version = true && that.is_set_version();
- if (this_present_version || that_present_version) {
- if (!(this_present_version && that_present_version))
- return false;
- if (!this.version.equals(that.version))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_supervisors = true && (is_set_supervisors());
- builder.append(present_supervisors);
- if (present_supervisors)
- builder.append(supervisors);
-
- boolean present_nimbus_uptime_secs = true;
- builder.append(present_nimbus_uptime_secs);
- if (present_nimbus_uptime_secs)
- builder.append(nimbus_uptime_secs);
-
- boolean present_topologies = true && (is_set_topologies());
- builder.append(present_topologies);
- if (present_topologies)
- builder.append(topologies);
-
- boolean present_version = true && (is_set_version());
- builder.append(present_version);
- if (present_version)
- builder.append(version);
-
- return builder.toHashCode();
- }
-
- public int compareTo(ClusterSummary other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- ClusterSummary typedOther = (ClusterSummary)other;
-
- lastComparison = Boolean.valueOf(is_set_supervisors()).compareTo(typedOther.is_set_supervisors());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_supervisors()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.supervisors, typedOther.supervisors);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_nimbus_uptime_secs()).compareTo(typedOther.is_set_nimbus_uptime_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_nimbus_uptime_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.nimbus_uptime_secs, typedOther.nimbus_uptime_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_topologies()).compareTo(typedOther.is_set_topologies());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_topologies()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.topologies, typedOther.topologies);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_version()).compareTo(typedOther.is_set_version());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_version()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.version, typedOther.version);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // SUPERVISORS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list37 = iprot.readListBegin();
- this.supervisors = new ArrayList<SupervisorSummary>(_list37.size);
- for (int _i38 = 0; _i38 < _list37.size; ++_i38)
- {
- SupervisorSummary _elem39; // required
- _elem39 = new SupervisorSummary();
- _elem39.read(iprot);
- this.supervisors.add(_elem39);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // NIMBUS_UPTIME_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.nimbus_uptime_secs = iprot.readI32();
- set_nimbus_uptime_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // TOPOLOGIES
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list40 = iprot.readListBegin();
- this.topologies = new ArrayList<TopologySummary>(_list40.size);
- for (int _i41 = 0; _i41 < _list40.size; ++_i41)
- {
- TopologySummary _elem42; // required
- _elem42 = new TopologySummary();
- _elem42.read(iprot);
- this.topologies.add(_elem42);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // VERSION
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.version = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.supervisors != null) {
- oprot.writeFieldBegin(SUPERVISORS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.supervisors.size()));
- for (SupervisorSummary _iter43 : this.supervisors)
- {
- _iter43.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(NIMBUS_UPTIME_SECS_FIELD_DESC);
- oprot.writeI32(this.nimbus_uptime_secs);
- oprot.writeFieldEnd();
- if (this.topologies != null) {
- oprot.writeFieldBegin(TOPOLOGIES_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.topologies.size()));
- for (TopologySummary _iter44 : this.topologies)
- {
- _iter44.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.version != null) {
- if (is_set_version()) {
- oprot.writeFieldBegin(VERSION_FIELD_DESC);
- oprot.writeString(this.version);
- oprot.writeFieldEnd();
- }
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("ClusterSummary(");
- boolean first = true;
-
- sb.append("supervisors:");
- if (this.supervisors == null) {
- sb.append("null");
- } else {
- sb.append(this.supervisors);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("nimbus_uptime_secs:");
- sb.append(this.nimbus_uptime_secs);
- first = false;
- if (!first) sb.append(", ");
- sb.append("topologies:");
- if (this.topologies == null) {
- sb.append("null");
- } else {
- sb.append(this.topologies);
- }
- first = false;
- if (is_set_version()) {
- if (!first) sb.append(", ");
- sb.append("version:");
- if (this.version == null) {
- sb.append("null");
- } else {
- sb.append(this.version);
- }
- first = false;
- }
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_supervisors()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'supervisors' is unset! Struct:" + toString());
- }
-
- if (!is_set_nimbus_uptime_secs()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'nimbus_uptime_secs' is unset! Struct:" + toString());
- }
-
- if (!is_set_topologies()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'topologies' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
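A minimal round-trip sketch (not from the commit; the class name is illustrative, and it assumes the pre-rewrite jstorm-client jar with its shaded org.apache.thrift7 runtime is still on the classpath). Because the writeObject/readObject hooks above delegate to TCompactProtocol, the old struct serializes through plain java.io object streams once its three required fields are set:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import backtype.storm.generated.ClusterSummary;
import backtype.storm.generated.SupervisorSummary;
import backtype.storm.generated.TopologySummary;

public class OldClusterSummaryRoundTrip {
    public static void main(String[] args) throws Exception {
        // The all-args constructor sets every required field, so validate()
        // inside write() passes; 'version' is optional and may stay unset.
        ClusterSummary summary = new ClusterSummary(
                new ArrayList<SupervisorSummary>(), 600, new ArrayList<TopologySummary>());

        // writeObject() above routes the struct through the thrift7 TCompactProtocol.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        try (ObjectOutputStream out = new ObjectOutputStream(bytes)) {
            out.writeObject(summary);
        }

        // readObject() rebuilds __isset_bit_vector and reads the same encoding back.
        try (ObjectInputStream in = new ObjectInputStream(
                new ByteArrayInputStream(bytes.toByteArray()))) {
            ClusterSummary copy = (ClusterSummary) in.readObject();
            System.out.println(copy.get_nimbus_uptime_secs()); // prints 600
        }
    }
}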
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/ClusterSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/ClusterSummary.java b/jstorm-core/src/main/java/backtype/storm/generated/ClusterSummary.java
new file mode 100644
index 0000000..1735b8a
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/ClusterSummary.java
@@ -0,0 +1,704 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class ClusterSummary implements org.apache.thrift.TBase<ClusterSummary, ClusterSummary._Fields>, java.io.Serializable, Cloneable, Comparable<ClusterSummary> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ClusterSummary");
+
+ private static final org.apache.thrift.protocol.TField NIMBUS_FIELD_DESC = new org.apache.thrift.protocol.TField("nimbus", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+ private static final org.apache.thrift.protocol.TField SUPERVISORS_FIELD_DESC = new org.apache.thrift.protocol.TField("supervisors", org.apache.thrift.protocol.TType.LIST, (short)2);
+ private static final org.apache.thrift.protocol.TField TOPOLOGIES_FIELD_DESC = new org.apache.thrift.protocol.TField("topologies", org.apache.thrift.protocol.TType.LIST, (short)3);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new ClusterSummaryStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new ClusterSummaryTupleSchemeFactory());
+ }
+
+ private NimbusSummary nimbus; // required
+ private List<SupervisorSummary> supervisors; // required
+ private List<TopologySummary> topologies; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ NIMBUS((short)1, "nimbus"),
+ SUPERVISORS((short)2, "supervisors"),
+ TOPOLOGIES((short)3, "topologies");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // NIMBUS
+ return NIMBUS;
+ case 2: // SUPERVISORS
+ return SUPERVISORS;
+ case 3: // TOPOLOGIES
+ return TOPOLOGIES;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.NIMBUS, new org.apache.thrift.meta_data.FieldMetaData("nimbus", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, NimbusSummary.class)));
+ tmpMap.put(_Fields.SUPERVISORS, new org.apache.thrift.meta_data.FieldMetaData("supervisors", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SupervisorSummary.class))));
+ tmpMap.put(_Fields.TOPOLOGIES, new org.apache.thrift.meta_data.FieldMetaData("topologies", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, TopologySummary.class))));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ClusterSummary.class, metaDataMap);
+ }
+
+ public ClusterSummary() {
+ }
+
+ public ClusterSummary(
+ NimbusSummary nimbus,
+ List<SupervisorSummary> supervisors,
+ List<TopologySummary> topologies)
+ {
+ this();
+ this.nimbus = nimbus;
+ this.supervisors = supervisors;
+ this.topologies = topologies;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public ClusterSummary(ClusterSummary other) {
+ if (other.is_set_nimbus()) {
+ this.nimbus = new NimbusSummary(other.nimbus);
+ }
+ if (other.is_set_supervisors()) {
+ List<SupervisorSummary> __this__supervisors = new ArrayList<SupervisorSummary>(other.supervisors.size());
+ for (SupervisorSummary other_element : other.supervisors) {
+ __this__supervisors.add(new SupervisorSummary(other_element));
+ }
+ this.supervisors = __this__supervisors;
+ }
+ if (other.is_set_topologies()) {
+ List<TopologySummary> __this__topologies = new ArrayList<TopologySummary>(other.topologies.size());
+ for (TopologySummary other_element : other.topologies) {
+ __this__topologies.add(new TopologySummary(other_element));
+ }
+ this.topologies = __this__topologies;
+ }
+ }
+
+ public ClusterSummary deepCopy() {
+ return new ClusterSummary(this);
+ }
+
+ @Override
+ public void clear() {
+ this.nimbus = null;
+ this.supervisors = null;
+ this.topologies = null;
+ }
+
+ public NimbusSummary get_nimbus() {
+ return this.nimbus;
+ }
+
+ public void set_nimbus(NimbusSummary nimbus) {
+ this.nimbus = nimbus;
+ }
+
+ public void unset_nimbus() {
+ this.nimbus = null;
+ }
+
+ /** Returns true if field nimbus is set (has been assigned a value) and false otherwise */
+ public boolean is_set_nimbus() {
+ return this.nimbus != null;
+ }
+
+ public void set_nimbus_isSet(boolean value) {
+ if (!value) {
+ this.nimbus = null;
+ }
+ }
+
+ public int get_supervisors_size() {
+ return (this.supervisors == null) ? 0 : this.supervisors.size();
+ }
+
+ public java.util.Iterator<SupervisorSummary> get_supervisors_iterator() {
+ return (this.supervisors == null) ? null : this.supervisors.iterator();
+ }
+
+ public void add_to_supervisors(SupervisorSummary elem) {
+ if (this.supervisors == null) {
+ this.supervisors = new ArrayList<SupervisorSummary>();
+ }
+ this.supervisors.add(elem);
+ }
+
+ public List<SupervisorSummary> get_supervisors() {
+ return this.supervisors;
+ }
+
+ public void set_supervisors(List<SupervisorSummary> supervisors) {
+ this.supervisors = supervisors;
+ }
+
+ public void unset_supervisors() {
+ this.supervisors = null;
+ }
+
+ /** Returns true if field supervisors is set (has been assigned a value) and false otherwise */
+ public boolean is_set_supervisors() {
+ return this.supervisors != null;
+ }
+
+ public void set_supervisors_isSet(boolean value) {
+ if (!value) {
+ this.supervisors = null;
+ }
+ }
+
+ public int get_topologies_size() {
+ return (this.topologies == null) ? 0 : this.topologies.size();
+ }
+
+ public java.util.Iterator<TopologySummary> get_topologies_iterator() {
+ return (this.topologies == null) ? null : this.topologies.iterator();
+ }
+
+ public void add_to_topologies(TopologySummary elem) {
+ if (this.topologies == null) {
+ this.topologies = new ArrayList<TopologySummary>();
+ }
+ this.topologies.add(elem);
+ }
+
+ public List<TopologySummary> get_topologies() {
+ return this.topologies;
+ }
+
+ public void set_topologies(List<TopologySummary> topologies) {
+ this.topologies = topologies;
+ }
+
+ public void unset_topologies() {
+ this.topologies = null;
+ }
+
+ /** Returns true if field topologies is set (has been assigned a value) and false otherwise */
+ public boolean is_set_topologies() {
+ return this.topologies != null;
+ }
+
+ public void set_topologies_isSet(boolean value) {
+ if (!value) {
+ this.topologies = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case NIMBUS:
+ if (value == null) {
+ unset_nimbus();
+ } else {
+ set_nimbus((NimbusSummary)value);
+ }
+ break;
+
+ case SUPERVISORS:
+ if (value == null) {
+ unset_supervisors();
+ } else {
+ set_supervisors((List<SupervisorSummary>)value);
+ }
+ break;
+
+ case TOPOLOGIES:
+ if (value == null) {
+ unset_topologies();
+ } else {
+ set_topologies((List<TopologySummary>)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case NIMBUS:
+ return get_nimbus();
+
+ case SUPERVISORS:
+ return get_supervisors();
+
+ case TOPOLOGIES:
+ return get_topologies();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case NIMBUS:
+ return is_set_nimbus();
+ case SUPERVISORS:
+ return is_set_supervisors();
+ case TOPOLOGIES:
+ return is_set_topologies();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof ClusterSummary)
+ return this.equals((ClusterSummary)that);
+ return false;
+ }
+
+ public boolean equals(ClusterSummary that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_nimbus = true && this.is_set_nimbus();
+ boolean that_present_nimbus = true && that.is_set_nimbus();
+ if (this_present_nimbus || that_present_nimbus) {
+ if (!(this_present_nimbus && that_present_nimbus))
+ return false;
+ if (!this.nimbus.equals(that.nimbus))
+ return false;
+ }
+
+ boolean this_present_supervisors = true && this.is_set_supervisors();
+ boolean that_present_supervisors = true && that.is_set_supervisors();
+ if (this_present_supervisors || that_present_supervisors) {
+ if (!(this_present_supervisors && that_present_supervisors))
+ return false;
+ if (!this.supervisors.equals(that.supervisors))
+ return false;
+ }
+
+ boolean this_present_topologies = true && this.is_set_topologies();
+ boolean that_present_topologies = true && that.is_set_topologies();
+ if (this_present_topologies || that_present_topologies) {
+ if (!(this_present_topologies && that_present_topologies))
+ return false;
+ if (!this.topologies.equals(that.topologies))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_nimbus = true && (is_set_nimbus());
+ list.add(present_nimbus);
+ if (present_nimbus)
+ list.add(nimbus);
+
+ boolean present_supervisors = true && (is_set_supervisors());
+ list.add(present_supervisors);
+ if (present_supervisors)
+ list.add(supervisors);
+
+ boolean present_topologies = true && (is_set_topologies());
+ list.add(present_topologies);
+ if (present_topologies)
+ list.add(topologies);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(ClusterSummary other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_nimbus()).compareTo(other.is_set_nimbus());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_nimbus()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.nimbus, other.nimbus);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_supervisors()).compareTo(other.is_set_supervisors());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_supervisors()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.supervisors, other.supervisors);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_topologies()).compareTo(other.is_set_topologies());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_topologies()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.topologies, other.topologies);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ClusterSummary(");
+ boolean first = true;
+
+ sb.append("nimbus:");
+ if (this.nimbus == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.nimbus);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("supervisors:");
+ if (this.supervisors == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.supervisors);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("topologies:");
+ if (this.topologies == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.topologies);
+ }
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_nimbus()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'nimbus' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_supervisors()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'supervisors' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_topologies()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'topologies' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ if (nimbus != null) {
+ nimbus.validate();
+ }
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class ClusterSummaryStandardSchemeFactory implements SchemeFactory {
+ public ClusterSummaryStandardScheme getScheme() {
+ return new ClusterSummaryStandardScheme();
+ }
+ }
+
+ private static class ClusterSummaryStandardScheme extends StandardScheme<ClusterSummary> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, ClusterSummary struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // NIMBUS
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.nimbus = new NimbusSummary();
+ struct.nimbus.read(iprot);
+ struct.set_nimbus_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // SUPERVISORS
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list82 = iprot.readListBegin();
+ struct.supervisors = new ArrayList<SupervisorSummary>(_list82.size);
+ SupervisorSummary _elem83;
+ for (int _i84 = 0; _i84 < _list82.size; ++_i84)
+ {
+ _elem83 = new SupervisorSummary();
+ _elem83.read(iprot);
+ struct.supervisors.add(_elem83);
+ }
+ iprot.readListEnd();
+ }
+ struct.set_supervisors_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 3: // TOPOLOGIES
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list85 = iprot.readListBegin();
+ struct.topologies = new ArrayList<TopologySummary>(_list85.size);
+ TopologySummary _elem86;
+ for (int _i87 = 0; _i87 < _list85.size; ++_i87)
+ {
+ _elem86 = new TopologySummary();
+ _elem86.read(iprot);
+ struct.topologies.add(_elem86);
+ }
+ iprot.readListEnd();
+ }
+ struct.set_topologies_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, ClusterSummary struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.nimbus != null) {
+ oprot.writeFieldBegin(NIMBUS_FIELD_DESC);
+ struct.nimbus.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ if (struct.supervisors != null) {
+ oprot.writeFieldBegin(SUPERVISORS_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.supervisors.size()));
+ for (SupervisorSummary _iter88 : struct.supervisors)
+ {
+ _iter88.write(oprot);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ if (struct.topologies != null) {
+ oprot.writeFieldBegin(TOPOLOGIES_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.topologies.size()));
+ for (TopologySummary _iter89 : struct.topologies)
+ {
+ _iter89.write(oprot);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class ClusterSummaryTupleSchemeFactory implements SchemeFactory {
+ public ClusterSummaryTupleScheme getScheme() {
+ return new ClusterSummaryTupleScheme();
+ }
+ }
+
+ private static class ClusterSummaryTupleScheme extends TupleScheme<ClusterSummary> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, ClusterSummary struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ struct.nimbus.write(oprot);
+ {
+ oprot.writeI32(struct.supervisors.size());
+ for (SupervisorSummary _iter90 : struct.supervisors)
+ {
+ _iter90.write(oprot);
+ }
+ }
+ {
+ oprot.writeI32(struct.topologies.size());
+ for (TopologySummary _iter91 : struct.topologies)
+ {
+ _iter91.write(oprot);
+ }
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, ClusterSummary struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.nimbus = new NimbusSummary();
+ struct.nimbus.read(iprot);
+ struct.set_nimbus_isSet(true);
+ {
+ org.apache.thrift.protocol.TList _list92 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.supervisors = new ArrayList<SupervisorSummary>(_list92.size);
+ SupervisorSummary _elem93;
+ for (int _i94 = 0; _i94 < _list92.size; ++_i94)
+ {
+ _elem93 = new SupervisorSummary();
+ _elem93.read(iprot);
+ struct.supervisors.add(_elem93);
+ }
+ }
+ struct.set_supervisors_isSet(true);
+ {
+ org.apache.thrift.protocol.TList _list95 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.topologies = new ArrayList<TopologySummary>(_list95.size);
+ TopologySummary _elem96;
+ for (int _i97 = 0; _i97 < _list95.size; ++_i97)
+ {
+ _elem96 = new TopologySummary();
+ _elem96.read(iprot);
+ struct.topologies.add(_elem96);
+ }
+ }
+ struct.set_topologies_isSet(true);
+ }
+ }
+
+}
+
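A minimal usage sketch (not from the commit; the class name is illustrative, and it assumes the sibling generated classes NimbusSummary, SupervisorSummary and TopologySummary are on the classpath). The regenerated struct keeps the per-field setters and also exposes the generic _Fields-driven accessors shown above, so callers can mix both styles:

import java.util.ArrayList;
import java.util.List;
import backtype.storm.generated.ClusterSummary;
import backtype.storm.generated.NimbusSummary;
import backtype.storm.generated.SupervisorSummary;
import backtype.storm.generated.TopologySummary;

public class ClusterSummaryFieldAccess {
    public static void main(String[] args) {
        ClusterSummary summary = new ClusterSummary();

        // Named, per-field mutators generated for each member...
        summary.set_nimbus(new NimbusSummary());
        summary.add_to_supervisors(new SupervisorSummary());

        // ...and the generic _Fields-based access used by framework code.
        summary.setFieldValue(ClusterSummary._Fields.TOPOLOGIES,
                new ArrayList<TopologySummary>());

        System.out.println(summary.isSet(ClusterSummary._Fields.NIMBUS)); // true
        System.out.println(summary.get_supervisors_size());               // 1

        @SuppressWarnings("unchecked")
        List<TopologySummary> topologies =
                (List<TopologySummary>) summary.getFieldValue(ClusterSummary._Fields.TOPOLOGIES);
        System.out.println(topologies.isEmpty());                         // true
    }
}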
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/ComponentCommon.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/ComponentCommon.java b/jstorm-core/src/main/java/backtype/storm/generated/ComponentCommon.java
new file mode 100644
index 0000000..0a98a62
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/ComponentCommon.java
@@ -0,0 +1,835 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.2)", date = "2015-7-27")
+public class ComponentCommon implements org.apache.thrift.TBase<ComponentCommon, ComponentCommon._Fields>, java.io.Serializable, Cloneable, Comparable<ComponentCommon> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentCommon");
+
+ private static final org.apache.thrift.protocol.TField INPUTS_FIELD_DESC = new org.apache.thrift.protocol.TField("inputs", org.apache.thrift.protocol.TType.MAP, (short)1);
+ private static final org.apache.thrift.protocol.TField STREAMS_FIELD_DESC = new org.apache.thrift.protocol.TField("streams", org.apache.thrift.protocol.TType.MAP, (short)2);
+ private static final org.apache.thrift.protocol.TField PARALLELISM_HINT_FIELD_DESC = new org.apache.thrift.protocol.TField("parallelism_hint", org.apache.thrift.protocol.TType.I32, (short)3);
+ private static final org.apache.thrift.protocol.TField JSON_CONF_FIELD_DESC = new org.apache.thrift.protocol.TField("json_conf", org.apache.thrift.protocol.TType.STRING, (short)4);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new ComponentCommonStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new ComponentCommonTupleSchemeFactory());
+ }
+
+ private Map<GlobalStreamId,Grouping> inputs; // required
+ private Map<String,StreamInfo> streams; // required
+ private int parallelism_hint; // optional
+ private String json_conf; // optional
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ INPUTS((short)1, "inputs"),
+ STREAMS((short)2, "streams"),
+ PARALLELISM_HINT((short)3, "parallelism_hint"),
+ JSON_CONF((short)4, "json_conf");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // INPUTS
+ return INPUTS;
+ case 2: // STREAMS
+ return STREAMS;
+ case 3: // PARALLELISM_HINT
+ return PARALLELISM_HINT;
+ case 4: // JSON_CONF
+ return JSON_CONF;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __PARALLELISM_HINT_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ private static final _Fields optionals[] = {_Fields.PARALLELISM_HINT,_Fields.JSON_CONF};
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.INPUTS, new org.apache.thrift.meta_data.FieldMetaData("inputs", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, GlobalStreamId.class),
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, Grouping.class))));
+ tmpMap.put(_Fields.STREAMS, new org.apache.thrift.meta_data.FieldMetaData("streams", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING),
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, StreamInfo.class))));
+ tmpMap.put(_Fields.PARALLELISM_HINT, new org.apache.thrift.meta_data.FieldMetaData("parallelism_hint", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ tmpMap.put(_Fields.JSON_CONF, new org.apache.thrift.meta_data.FieldMetaData("json_conf", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ComponentCommon.class, metaDataMap);
+ }
+
+ public ComponentCommon() {
+ }
+
+ public ComponentCommon(
+ Map<GlobalStreamId,Grouping> inputs,
+ Map<String,StreamInfo> streams)
+ {
+ this();
+ this.inputs = inputs;
+ this.streams = streams;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public ComponentCommon(ComponentCommon other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.is_set_inputs()) {
+ Map<GlobalStreamId,Grouping> __this__inputs = new HashMap<GlobalStreamId,Grouping>(other.inputs.size());
+ for (Map.Entry<GlobalStreamId, Grouping> other_element : other.inputs.entrySet()) {
+
+ GlobalStreamId other_element_key = other_element.getKey();
+ Grouping other_element_value = other_element.getValue();
+
+ GlobalStreamId __this__inputs_copy_key = new GlobalStreamId(other_element_key);
+
+ Grouping __this__inputs_copy_value = new Grouping(other_element_value);
+
+ __this__inputs.put(__this__inputs_copy_key, __this__inputs_copy_value);
+ }
+ this.inputs = __this__inputs;
+ }
+ if (other.is_set_streams()) {
+ Map<String,StreamInfo> __this__streams = new HashMap<String,StreamInfo>(other.streams.size());
+ for (Map.Entry<String, StreamInfo> other_element : other.streams.entrySet()) {
+
+ String other_element_key = other_element.getKey();
+ StreamInfo other_element_value = other_element.getValue();
+
+ String __this__streams_copy_key = other_element_key;
+
+ StreamInfo __this__streams_copy_value = new StreamInfo(other_element_value);
+
+ __this__streams.put(__this__streams_copy_key, __this__streams_copy_value);
+ }
+ this.streams = __this__streams;
+ }
+ this.parallelism_hint = other.parallelism_hint;
+ if (other.is_set_json_conf()) {
+ this.json_conf = other.json_conf;
+ }
+ }
+
+ public ComponentCommon deepCopy() {
+ return new ComponentCommon(this);
+ }
+
+ @Override
+ public void clear() {
+ this.inputs = null;
+ this.streams = null;
+ set_parallelism_hint_isSet(false);
+ this.parallelism_hint = 0;
+ this.json_conf = null;
+ }
+
+ public int get_inputs_size() {
+ return (this.inputs == null) ? 0 : this.inputs.size();
+ }
+
+ public void put_to_inputs(GlobalStreamId key, Grouping val) {
+ if (this.inputs == null) {
+ this.inputs = new HashMap<GlobalStreamId,Grouping>();
+ }
+ this.inputs.put(key, val);
+ }
+
+ public Map<GlobalStreamId,Grouping> get_inputs() {
+ return this.inputs;
+ }
+
+ public void set_inputs(Map<GlobalStreamId,Grouping> inputs) {
+ this.inputs = inputs;
+ }
+
+ public void unset_inputs() {
+ this.inputs = null;
+ }
+
+ /** Returns true if field inputs is set (has been assigned a value) and false otherwise */
+ public boolean is_set_inputs() {
+ return this.inputs != null;
+ }
+
+ public void set_inputs_isSet(boolean value) {
+ if (!value) {
+ this.inputs = null;
+ }
+ }
+
+ public int get_streams_size() {
+ return (this.streams == null) ? 0 : this.streams.size();
+ }
+
+ public void put_to_streams(String key, StreamInfo val) {
+ if (this.streams == null) {
+ this.streams = new HashMap<String,StreamInfo>();
+ }
+ this.streams.put(key, val);
+ }
+
+ public Map<String,StreamInfo> get_streams() {
+ return this.streams;
+ }
+
+ public void set_streams(Map<String,StreamInfo> streams) {
+ this.streams = streams;
+ }
+
+ public void unset_streams() {
+ this.streams = null;
+ }
+
+ /** Returns true if field streams is set (has been assigned a value) and false otherwise */
+ public boolean is_set_streams() {
+ return this.streams != null;
+ }
+
+ public void set_streams_isSet(boolean value) {
+ if (!value) {
+ this.streams = null;
+ }
+ }
+
+ public int get_parallelism_hint() {
+ return this.parallelism_hint;
+ }
+
+ public void set_parallelism_hint(int parallelism_hint) {
+ this.parallelism_hint = parallelism_hint;
+ set_parallelism_hint_isSet(true);
+ }
+
+ public void unset_parallelism_hint() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __PARALLELISM_HINT_ISSET_ID);
+ }
+
+ /** Returns true if field parallelism_hint is set (has been assigned a value) and false otherwise */
+ public boolean is_set_parallelism_hint() {
+ return EncodingUtils.testBit(__isset_bitfield, __PARALLELISM_HINT_ISSET_ID);
+ }
+
+ public void set_parallelism_hint_isSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __PARALLELISM_HINT_ISSET_ID, value);
+ }
+
+ public String get_json_conf() {
+ return this.json_conf;
+ }
+
+ public void set_json_conf(String json_conf) {
+ this.json_conf = json_conf;
+ }
+
+ public void unset_json_conf() {
+ this.json_conf = null;
+ }
+
+ /** Returns true if field json_conf is set (has been assigned a value) and false otherwise */
+ public boolean is_set_json_conf() {
+ return this.json_conf != null;
+ }
+
+ public void set_json_conf_isSet(boolean value) {
+ if (!value) {
+ this.json_conf = null;
+ }
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case INPUTS:
+ if (value == null) {
+ unset_inputs();
+ } else {
+ set_inputs((Map<GlobalStreamId,Grouping>)value);
+ }
+ break;
+
+ case STREAMS:
+ if (value == null) {
+ unset_streams();
+ } else {
+ set_streams((Map<String,StreamInfo>)value);
+ }
+ break;
+
+ case PARALLELISM_HINT:
+ if (value == null) {
+ unset_parallelism_hint();
+ } else {
+ set_parallelism_hint((Integer)value);
+ }
+ break;
+
+ case JSON_CONF:
+ if (value == null) {
+ unset_json_conf();
+ } else {
+ set_json_conf((String)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case INPUTS:
+ return get_inputs();
+
+ case STREAMS:
+ return get_streams();
+
+ case PARALLELISM_HINT:
+ return Integer.valueOf(get_parallelism_hint());
+
+ case JSON_CONF:
+ return get_json_conf();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case INPUTS:
+ return is_set_inputs();
+ case STREAMS:
+ return is_set_streams();
+ case PARALLELISM_HINT:
+ return is_set_parallelism_hint();
+ case JSON_CONF:
+ return is_set_json_conf();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof ComponentCommon)
+ return this.equals((ComponentCommon)that);
+ return false;
+ }
+
+ public boolean equals(ComponentCommon that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_inputs = true && this.is_set_inputs();
+ boolean that_present_inputs = true && that.is_set_inputs();
+ if (this_present_inputs || that_present_inputs) {
+ if (!(this_present_inputs && that_present_inputs))
+ return false;
+ if (!this.inputs.equals(that.inputs))
+ return false;
+ }
+
+ boolean this_present_streams = true && this.is_set_streams();
+ boolean that_present_streams = true && that.is_set_streams();
+ if (this_present_streams || that_present_streams) {
+ if (!(this_present_streams && that_present_streams))
+ return false;
+ if (!this.streams.equals(that.streams))
+ return false;
+ }
+
+ boolean this_present_parallelism_hint = true && this.is_set_parallelism_hint();
+ boolean that_present_parallelism_hint = true && that.is_set_parallelism_hint();
+ if (this_present_parallelism_hint || that_present_parallelism_hint) {
+ if (!(this_present_parallelism_hint && that_present_parallelism_hint))
+ return false;
+ if (this.parallelism_hint != that.parallelism_hint)
+ return false;
+ }
+
+ boolean this_present_json_conf = true && this.is_set_json_conf();
+ boolean that_present_json_conf = true && that.is_set_json_conf();
+ if (this_present_json_conf || that_present_json_conf) {
+ if (!(this_present_json_conf && that_present_json_conf))
+ return false;
+ if (!this.json_conf.equals(that.json_conf))
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_inputs = true && (is_set_inputs());
+ list.add(present_inputs);
+ if (present_inputs)
+ list.add(inputs);
+
+ boolean present_streams = true && (is_set_streams());
+ list.add(present_streams);
+ if (present_streams)
+ list.add(streams);
+
+ boolean present_parallelism_hint = true && (is_set_parallelism_hint());
+ list.add(present_parallelism_hint);
+ if (present_parallelism_hint)
+ list.add(parallelism_hint);
+
+ boolean present_json_conf = true && (is_set_json_conf());
+ list.add(present_json_conf);
+ if (present_json_conf)
+ list.add(json_conf);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(ComponentCommon other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_inputs()).compareTo(other.is_set_inputs());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_inputs()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.inputs, other.inputs);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_streams()).compareTo(other.is_set_streams());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_streams()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.streams, other.streams);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_parallelism_hint()).compareTo(other.is_set_parallelism_hint());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_parallelism_hint()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.parallelism_hint, other.parallelism_hint);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_json_conf()).compareTo(other.is_set_json_conf());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_json_conf()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.json_conf, other.json_conf);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ComponentCommon(");
+ boolean first = true;
+
+ sb.append("inputs:");
+ if (this.inputs == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.inputs);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("streams:");
+ if (this.streams == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.streams);
+ }
+ first = false;
+ if (is_set_parallelism_hint()) {
+ if (!first) sb.append(", ");
+ sb.append("parallelism_hint:");
+ sb.append(this.parallelism_hint);
+ first = false;
+ }
+ if (is_set_json_conf()) {
+ if (!first) sb.append(", ");
+ sb.append("json_conf:");
+ if (this.json_conf == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.json_conf);
+ }
+ first = false;
+ }
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_inputs()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'inputs' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_streams()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'streams' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class ComponentCommonStandardSchemeFactory implements SchemeFactory {
+ public ComponentCommonStandardScheme getScheme() {
+ return new ComponentCommonStandardScheme();
+ }
+ }
+
+ private static class ComponentCommonStandardScheme extends StandardScheme<ComponentCommon> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, ComponentCommon struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // INPUTS
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map24 = iprot.readMapBegin();
+ struct.inputs = new HashMap<GlobalStreamId,Grouping>(2*_map24.size);
+ GlobalStreamId _key25;
+ Grouping _val26;
+ for (int _i27 = 0; _i27 < _map24.size; ++_i27)
+ {
+ _key25 = new GlobalStreamId();
+ _key25.read(iprot);
+ _val26 = new Grouping();
+ _val26.read(iprot);
+ struct.inputs.put(_key25, _val26);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_inputs_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // STREAMS
+ if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
+ {
+ org.apache.thrift.protocol.TMap _map28 = iprot.readMapBegin();
+ struct.streams = new HashMap<String,StreamInfo>(2*_map28.size);
+ String _key29;
+ StreamInfo _val30;
+ for (int _i31 = 0; _i31 < _map28.size; ++_i31)
+ {
+ _key29 = iprot.readString();
+ _val30 = new StreamInfo();
+ _val30.read(iprot);
+ struct.streams.put(_key29, _val30);
+ }
+ iprot.readMapEnd();
+ }
+ struct.set_streams_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 3: // PARALLELISM_HINT
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.parallelism_hint = iprot.readI32();
+ struct.set_parallelism_hint_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 4: // JSON_CONF
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRING) {
+ struct.json_conf = iprot.readString();
+ struct.set_json_conf_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, ComponentCommon struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.inputs != null) {
+ oprot.writeFieldBegin(INPUTS_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.STRUCT, struct.inputs.size()));
+ for (Map.Entry<GlobalStreamId, Grouping> _iter32 : struct.inputs.entrySet())
+ {
+ _iter32.getKey().write(oprot);
+ _iter32.getValue().write(oprot);
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ if (struct.streams != null) {
+ oprot.writeFieldBegin(STREAMS_FIELD_DESC);
+ {
+ oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.streams.size()));
+ for (Map.Entry<String, StreamInfo> _iter33 : struct.streams.entrySet())
+ {
+ oprot.writeString(_iter33.getKey());
+ _iter33.getValue().write(oprot);
+ }
+ oprot.writeMapEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ if (struct.is_set_parallelism_hint()) {
+ oprot.writeFieldBegin(PARALLELISM_HINT_FIELD_DESC);
+ oprot.writeI32(struct.parallelism_hint);
+ oprot.writeFieldEnd();
+ }
+ if (struct.json_conf != null) {
+ if (struct.is_set_json_conf()) {
+ oprot.writeFieldBegin(JSON_CONF_FIELD_DESC);
+ oprot.writeString(struct.json_conf);
+ oprot.writeFieldEnd();
+ }
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class ComponentCommonTupleSchemeFactory implements SchemeFactory {
+ public ComponentCommonTupleScheme getScheme() {
+ return new ComponentCommonTupleScheme();
+ }
+ }
+
+ private static class ComponentCommonTupleScheme extends TupleScheme<ComponentCommon> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, ComponentCommon struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ {
+ oprot.writeI32(struct.inputs.size());
+ for (Map.Entry<GlobalStreamId, Grouping> _iter34 : struct.inputs.entrySet())
+ {
+ _iter34.getKey().write(oprot);
+ _iter34.getValue().write(oprot);
+ }
+ }
+ {
+ oprot.writeI32(struct.streams.size());
+ for (Map.Entry<String, StreamInfo> _iter35 : struct.streams.entrySet())
+ {
+ oprot.writeString(_iter35.getKey());
+ _iter35.getValue().write(oprot);
+ }
+ }
+ BitSet optionals = new BitSet();
+ if (struct.is_set_parallelism_hint()) {
+ optionals.set(0);
+ }
+ if (struct.is_set_json_conf()) {
+ optionals.set(1);
+ }
+ oprot.writeBitSet(optionals, 2);
+ if (struct.is_set_parallelism_hint()) {
+ oprot.writeI32(struct.parallelism_hint);
+ }
+ if (struct.is_set_json_conf()) {
+ oprot.writeString(struct.json_conf);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, ComponentCommon struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ {
+ org.apache.thrift.protocol.TMap _map36 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRUCT, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.inputs = new HashMap<GlobalStreamId,Grouping>(2*_map36.size);
+ GlobalStreamId _key37;
+ Grouping _val38;
+ for (int _i39 = 0; _i39 < _map36.size; ++_i39)
+ {
+ _key37 = new GlobalStreamId();
+ _key37.read(iprot);
+ _val38 = new Grouping();
+ _val38.read(iprot);
+ struct.inputs.put(_key37, _val38);
+ }
+ }
+ struct.set_inputs_isSet(true);
+ {
+ org.apache.thrift.protocol.TMap _map40 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.streams = new HashMap<String,StreamInfo>(2*_map40.size);
+ String _key41;
+ StreamInfo _val42;
+ for (int _i43 = 0; _i43 < _map40.size; ++_i43)
+ {
+ _key41 = iprot.readString();
+ _val42 = new StreamInfo();
+ _val42.read(iprot);
+ struct.streams.put(_key41, _val42);
+ }
+ }
+ struct.set_streams_isSet(true);
+ BitSet incoming = iprot.readBitSet(2);
+ if (incoming.get(0)) {
+ struct.parallelism_hint = iprot.readI32();
+ struct.set_parallelism_hint_isSet(true);
+ }
+ if (incoming.get(1)) {
+ struct.json_conf = iprot.readString();
+ struct.set_json_conf_isSet(true);
+ }
+ }
+ }
+
+}
+
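
For reference, a minimal usage sketch of the generated ComponentCommon struct above. It is illustrative only and assumes the companion generated types GlobalStreamId, Grouping, NullStruct and StreamInfo from the same backtype.storm.generated package, together with their usual generated constructors and factory methods:

    import java.util.Arrays;

    import backtype.storm.generated.ComponentCommon;
    import backtype.storm.generated.GlobalStreamId;
    import backtype.storm.generated.Grouping;
    import backtype.storm.generated.NullStruct;
    import backtype.storm.generated.StreamInfo;

    public class ComponentCommonSketch {
        public static void main(String[] args) throws Exception {
            ComponentCommon common = new ComponentCommon();

            // inputs and streams are the two required fields checked by validate()
            common.put_to_inputs(new GlobalStreamId("upstream-bolt", "default"),
                    Grouping.shuffle(new NullStruct()));
            common.put_to_streams("default",
                    new StreamInfo(Arrays.asList("word", "count"), false));

            // optional fields: parallelism_hint is tracked via the isset bitfield,
            // json_conf simply stays null until assigned
            common.set_parallelism_hint(4);
            common.set_json_conf("{\"topology.debug\": false}");

            common.validate();          // throws TProtocolException if a required field is unset
            System.out.println(common); // toString() prints the optional fields only when set
        }
    }
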
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/generated/ComponentObject.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/generated/ComponentObject.java b/jstorm-core/src/main/java/backtype/storm/generated/ComponentObject.java
new file mode 100755
index 0000000..ab32225
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/generated/ComponentObject.java
@@ -0,0 +1,445 @@
+/**
+ * Autogenerated by Thrift Compiler (0.9.2)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+public class ComponentObject extends org.apache.thrift.TUnion<ComponentObject, ComponentObject._Fields> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ComponentObject");
+ private static final org.apache.thrift.protocol.TField SERIALIZED_JAVA_FIELD_DESC = new org.apache.thrift.protocol.TField("serialized_java", org.apache.thrift.protocol.TType.STRING, (short)1);
+ private static final org.apache.thrift.protocol.TField SHELL_FIELD_DESC = new org.apache.thrift.protocol.TField("shell", org.apache.thrift.protocol.TType.STRUCT, (short)2);
+ private static final org.apache.thrift.protocol.TField JAVA_OBJECT_FIELD_DESC = new org.apache.thrift.protocol.TField("java_object", org.apache.thrift.protocol.TType.STRUCT, (short)3);
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ SERIALIZED_JAVA((short)1, "serialized_java"),
+ SHELL((short)2, "shell"),
+ JAVA_OBJECT((short)3, "java_object");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // SERIALIZED_JAVA
+ return SERIALIZED_JAVA;
+ case 2: // SHELL
+ return SHELL;
+ case 3: // JAVA_OBJECT
+ return JAVA_OBJECT;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.SERIALIZED_JAVA, new org.apache.thrift.meta_data.FieldMetaData("serialized_java", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING , true)));
+ tmpMap.put(_Fields.SHELL, new org.apache.thrift.meta_data.FieldMetaData("shell", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, ShellComponent.class)));
+ tmpMap.put(_Fields.JAVA_OBJECT, new org.apache.thrift.meta_data.FieldMetaData("java_object", org.apache.thrift.TFieldRequirementType.DEFAULT,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, JavaObject.class)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ComponentObject.class, metaDataMap);
+ }
+
+ public ComponentObject() {
+ super();
+ }
+
+ public ComponentObject(_Fields setField, Object value) {
+ super(setField, value);
+ }
+
+ public ComponentObject(ComponentObject other) {
+ super(other);
+ }
+ public ComponentObject deepCopy() {
+ return new ComponentObject(this);
+ }
+
+ public static ComponentObject serialized_java(ByteBuffer value) {
+ ComponentObject x = new ComponentObject();
+ x.set_serialized_java(value);
+ return x;
+ }
+
+ public static ComponentObject serialized_java(byte[] value) {
+ ComponentObject x = new ComponentObject();
+ x.set_serialized_java(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+ return x;
+ }
+
+ public static ComponentObject shell(ShellComponent value) {
+ ComponentObject x = new ComponentObject();
+ x.set_shell(value);
+ return x;
+ }
+
+ public static ComponentObject java_object(JavaObject value) {
+ ComponentObject x = new ComponentObject();
+ x.set_java_object(value);
+ return x;
+ }
+
+
+ @Override
+ protected void checkType(_Fields setField, Object value) throws ClassCastException {
+ switch (setField) {
+ case SERIALIZED_JAVA:
+ if (value instanceof ByteBuffer) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type ByteBuffer for field 'serialized_java', but got " + value.getClass().getSimpleName());
+ case SHELL:
+ if (value instanceof ShellComponent) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type ShellComponent for field 'shell', but got " + value.getClass().getSimpleName());
+ case JAVA_OBJECT:
+ if (value instanceof JavaObject) {
+ break;
+ }
+ throw new ClassCastException("Was expecting value of type JavaObject for field 'java_object', but got " + value.getClass().getSimpleName());
+ default:
+ throw new IllegalArgumentException("Unknown field id " + setField);
+ }
+ }
+
+ @Override
+ protected Object standardSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, org.apache.thrift.protocol.TField field) throws org.apache.thrift.TException {
+ _Fields setField = _Fields.findByThriftId(field.id);
+ if (setField != null) {
+ switch (setField) {
+ case SERIALIZED_JAVA:
+ if (field.type == SERIALIZED_JAVA_FIELD_DESC.type) {
+ ByteBuffer serialized_java;
+ serialized_java = iprot.readBinary();
+ return serialized_java;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case SHELL:
+ if (field.type == SHELL_FIELD_DESC.type) {
+ ShellComponent shell;
+ shell = new ShellComponent();
+ shell.read(iprot);
+ return shell;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ case JAVA_OBJECT:
+ if (field.type == JAVA_OBJECT_FIELD_DESC.type) {
+ JavaObject java_object;
+ java_object = new JavaObject();
+ java_object.read(iprot);
+ return java_object;
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ default:
+ throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+ }
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, field.type);
+ return null;
+ }
+ }
+
+ @Override
+ protected void standardSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ switch (setField_) {
+ case SERIALIZED_JAVA:
+ ByteBuffer serialized_java = (ByteBuffer)value_;
+ oprot.writeBinary(serialized_java);
+ return;
+ case SHELL:
+ ShellComponent shell = (ShellComponent)value_;
+ shell.write(oprot);
+ return;
+ case JAVA_OBJECT:
+ JavaObject java_object = (JavaObject)value_;
+ java_object.write(oprot);
+ return;
+ default:
+ throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+ }
+ }
+
+ @Override
+ protected Object tupleSchemeReadValue(org.apache.thrift.protocol.TProtocol iprot, short fieldID) throws org.apache.thrift.TException {
+ _Fields setField = _Fields.findByThriftId(fieldID);
+ if (setField != null) {
+ switch (setField) {
+ case SERIALIZED_JAVA:
+ ByteBuffer serialized_java;
+ serialized_java = iprot.readBinary();
+ return serialized_java;
+ case SHELL:
+ ShellComponent shell;
+ shell = new ShellComponent();
+ shell.read(iprot);
+ return shell;
+ case JAVA_OBJECT:
+ JavaObject java_object;
+ java_object = new JavaObject();
+ java_object.read(iprot);
+ return java_object;
+ default:
+ throw new IllegalStateException("setField wasn't null, but didn't match any of the case statements!");
+ }
+ } else {
+ throw new TProtocolException("Couldn't find a field with field id " + fieldID);
+ }
+ }
+
+ @Override
+ protected void tupleSchemeWriteValue(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ switch (setField_) {
+ case SERIALIZED_JAVA:
+ ByteBuffer serialized_java = (ByteBuffer)value_;
+ oprot.writeBinary(serialized_java);
+ return;
+ case SHELL:
+ ShellComponent shell = (ShellComponent)value_;
+ shell.write(oprot);
+ return;
+ case JAVA_OBJECT:
+ JavaObject java_object = (JavaObject)value_;
+ java_object.write(oprot);
+ return;
+ default:
+ throw new IllegalStateException("Cannot write union with unknown field " + setField_);
+ }
+ }
+
+ @Override
+ protected org.apache.thrift.protocol.TField getFieldDesc(_Fields setField) {
+ switch (setField) {
+ case SERIALIZED_JAVA:
+ return SERIALIZED_JAVA_FIELD_DESC;
+ case SHELL:
+ return SHELL_FIELD_DESC;
+ case JAVA_OBJECT:
+ return JAVA_OBJECT_FIELD_DESC;
+ default:
+ throw new IllegalArgumentException("Unknown field id " + setField);
+ }
+ }
+
+ @Override
+ protected org.apache.thrift.protocol.TStruct getStructDesc() {
+ return STRUCT_DESC;
+ }
+
+ @Override
+ protected _Fields enumForId(short id) {
+ return _Fields.findByThriftIdOrThrow(id);
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+
+ public byte[] get_serialized_java() {
+ set_serialized_java(org.apache.thrift.TBaseHelper.rightSize(buffer_for_serialized_java()));
+ ByteBuffer b = buffer_for_serialized_java();
+ return b == null ? null : b.array();
+ }
+
+ public ByteBuffer buffer_for_serialized_java() {
+ if (getSetField() == _Fields.SERIALIZED_JAVA) {
+ return org.apache.thrift.TBaseHelper.copyBinary((ByteBuffer)getFieldValue());
+ } else {
+ throw new RuntimeException("Cannot get field 'serialized_java' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_serialized_java(byte[] value) {
+ set_serialized_java(ByteBuffer.wrap(Arrays.copyOf(value, value.length)));
+ }
+
+ public void set_serialized_java(ByteBuffer value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.SERIALIZED_JAVA;
+ value_ = value;
+ }
+
+ public ShellComponent get_shell() {
+ if (getSetField() == _Fields.SHELL) {
+ return (ShellComponent)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'shell' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_shell(ShellComponent value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.SHELL;
+ value_ = value;
+ }
+
+ public JavaObject get_java_object() {
+ if (getSetField() == _Fields.JAVA_OBJECT) {
+ return (JavaObject)getFieldValue();
+ } else {
+ throw new RuntimeException("Cannot get field 'java_object' because union is currently set to " + getFieldDesc(getSetField()).name);
+ }
+ }
+
+ public void set_java_object(JavaObject value) {
+ if (value == null) throw new NullPointerException();
+ setField_ = _Fields.JAVA_OBJECT;
+ value_ = value;
+ }
+
+ public boolean is_set_serialized_java() {
+ return setField_ == _Fields.SERIALIZED_JAVA;
+ }
+
+
+ public boolean is_set_shell() {
+ return setField_ == _Fields.SHELL;
+ }
+
+
+ public boolean is_set_java_object() {
+ return setField_ == _Fields.JAVA_OBJECT;
+ }
+
+
+ public boolean equals(Object other) {
+ if (other instanceof ComponentObject) {
+ return equals((ComponentObject)other);
+ } else {
+ return false;
+ }
+ }
+
+ public boolean equals(ComponentObject other) {
+ return other != null && getSetField() == other.getSetField() && getFieldValue().equals(other.getFieldValue());
+ }
+
+ @Override
+ public int compareTo(ComponentObject other) {
+ int lastComparison = org.apache.thrift.TBaseHelper.compareTo(getSetField(), other.getSetField());
+ if (lastComparison == 0) {
+ return org.apache.thrift.TBaseHelper.compareTo(getFieldValue(), other.getFieldValue());
+ }
+ return lastComparison;
+ }
+
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+ list.add(this.getClass().getName());
+ org.apache.thrift.TFieldIdEnum setField = getSetField();
+ if (setField != null) {
+ list.add(setField.getThriftFieldId());
+ Object value = getFieldValue();
+ if (value instanceof org.apache.thrift.TEnum) {
+ list.add(((org.apache.thrift.TEnum)getFieldValue()).getValue());
+ } else {
+ list.add(value);
+ }
+ }
+ return list.hashCode();
+ }
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+
+}
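
Because ComponentObject extends TUnion, exactly one of its three arms (serialized_java, shell, java_object) is set at any time. A short illustrative sketch of the accessor pattern, using only the factory and guard methods defined above:

    import backtype.storm.generated.ComponentObject;

    public class ComponentObjectSketch {
        public static void main(String[] args) {
            // the static factories return a union with the chosen arm set
            byte[] payload = new byte[] { 1, 2, 3 };
            ComponentObject obj = ComponentObject.serialized_java(payload);

            // check which arm is set before reading it; the other getters throw
            if (obj.is_set_serialized_java()) {
                byte[] copy = obj.get_serialized_java();
                System.out.println("serialized_java holds " + copy.length + " bytes");
            }

            // calling obj.get_shell() here would throw a RuntimeException because
            // the union is currently set to SERIALIZED_JAVA
        }
    }
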
[56/60] [abbrv] storm git commit: remove jstorm-utility directory
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/JSONUtil.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/JSONUtil.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/JSONUtil.java
deleted file mode 100755
index 673576c..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/JSONUtil.java
+++ /dev/null
@@ -1,125 +0,0 @@
-package com.dianping.cosmos.util;
-
-import java.io.IOException;
-import java.io.StringWriter;
-import java.util.List;
-import java.util.Map;
-
-import org.codehaus.jackson.JsonFactory;
-import org.codehaus.jackson.JsonGenerator;
-import org.codehaus.jackson.JsonNode;
-import org.codehaus.jackson.JsonProcessingException;
-import org.codehaus.jackson.map.ObjectMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * JSON utility class
- * @author xinchun.wang
- *
- */
-public class JSONUtil {
- private static final Logger LOGGER = LoggerFactory.getLogger(JSONUtil.class);
-
- private static final ObjectMapper MAPPER = new ObjectMapper();
-
- private static final JSONUtil INSTANCE = new JSONUtil();
-
- public static JSONUtil getInstance() {
- return INSTANCE;
- }
-
- private JSONUtil() {
-
- }
-
- /**
- * Convert a map to JSON
- *
- * @param map
- * @return
- */
- public String formatMap2JSON(Map<String, Object> map) {
- StringWriter stringWriter = new StringWriter();
- String json = "";
- try {
- JsonGenerator gen = new JsonFactory()
- .createJsonGenerator(stringWriter);
- MAPPER.writeValue(gen, map);
- gen.close();
- json = stringWriter.toString();
- } catch (Exception e) {
- LOGGER.error("", e);
- }
- return json;
- }
-
- /**
- * Convert a POJO to JSON
- * @param pojo
- * @return
- */
- public String formatPOJO2JSON(Object pojo) {
- StringWriter stringWriter = new StringWriter();
- String json = "";
- try {
- JsonGenerator gen = new JsonFactory()
- .createJsonGenerator(stringWriter);
- MAPPER.writeValue(gen, pojo);
- gen.close();
- json = stringWriter.toString();
- } catch (Exception e) {
- LOGGER.error(pojo.getClass().getName() + " failed to convert to JSON", e);
- }
- return json;
- }
-
- /**
- * Convert JSON to a map
- *
- * @param json
- * @return
- */
- public Map<?, ?> formatJSON2Map(String json) {
- Map<?, ?> map = null;
- try {
- map = MAPPER.readValue(json, Map.class);
- } catch (Exception e) {
- LOGGER.error("formatJsonToMap error, json = " + json, e);
- }
- return map;
- }
-
-
-
- /**
- * Convert JSON to a List
- * @param json
- * @return
- */
- public List<?> formatJSON2List(String json) {
- List<?> list = null;
- try {
- list = MAPPER.readValue(json, List.class);
- } catch (Exception e) {
- LOGGER.error("formatJSON2List error, json = " + json, e);
- }
- return list;
- }
-
- public boolean equals(String firstJSON, String secondJSON) {
- try {
- JsonNode tree1 = MAPPER.readTree(firstJSON);
- JsonNode tree2 = MAPPER.readTree(secondJSON);
- boolean areEqual = tree1.equals(tree2);
- return areEqual;
- } catch (JsonProcessingException e) {
- LOGGER.error("json compare wrong:" + firstJSON + ";" + secondJSON,
- e);
- } catch (IOException e) {
- LOGGER.error("json compare wrong:" + firstJSON + ";" + secondJSON,
- e);
- }
- return false;
- }
-}
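
A small illustrative sketch of how the singleton above was typically used, relying only on the methods defined in the deleted class:

    import java.util.HashMap;
    import java.util.Map;

    import com.dianping.cosmos.util.JSONUtil;

    public class JSONUtilSketch {
        public static void main(String[] args) {
            JSONUtil util = JSONUtil.getInstance();

            Map<String, Object> map = new HashMap<String, Object>();
            map.put("topology", "batch_test");
            map.put("workers", 10);

            String json = util.formatMap2JSON(map);       // serialize a map to JSON
            Map<?, ?> parsed = util.formatJSON2Map(json); // parse it back
            System.out.println(parsed);

            // structural comparison of two JSON documents (key order does not matter)
            System.out.println(util.equals(json, "{\"workers\":10,\"topology\":\"batch_test\"}"));
        }
    }
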
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/TupleHelpers.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/TupleHelpers.java b/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/TupleHelpers.java
deleted file mode 100755
index ebcf5bc..0000000
--- a/jstorm-utility/topology-monitor/src/main/java/com/dianping/cosmos/util/TupleHelpers.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.dianping.cosmos.util;
-
-import backtype.storm.Constants;
-import backtype.storm.tuple.Tuple;
-
-public final class TupleHelpers {
-
- private TupleHelpers() {
- }
-
- public static boolean isTickTuple(Tuple tuple) {
- return tuple.getSourceComponent().equals(Constants.SYSTEM_COMPONENT_ID) && tuple.getSourceStreamId().equals(
- Constants.SYSTEM_TICK_STREAM_ID);
- }
-
-}
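
The removed helper above is the usual guard for Storm's system tick tuples. A sketch of how a bolt would use it, assuming the standard Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS setting to request ticks:

    import java.util.HashMap;
    import java.util.Map;

    import backtype.storm.Config;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.BasicOutputCollector;
    import backtype.storm.topology.IBasicBolt;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.tuple.Tuple;

    import com.dianping.cosmos.util.TupleHelpers;

    public class PeriodicFlushBolt implements IBasicBolt {
        private static final long serialVersionUID = 1L;

        @Override
        public void prepare(Map stormConf, TopologyContext context) {
        }

        @Override
        public void execute(Tuple input, BasicOutputCollector collector) {
            if (TupleHelpers.isTickTuple(input)) {
                // system tick: flush buffered state instead of treating it as data
                return;
            }
            // ... normal tuple handling ...
        }

        @Override
        public void cleanup() {
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
        }

        @Override
        public Map<String, Object> getComponentConfiguration() {
            // ask the framework to deliver a tick tuple to this bolt every 10 seconds
            Map<String, Object> conf = new HashMap<String, Object>();
            conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 10);
            return conf;
        }
    }
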
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/conf/topology.yaml
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/conf/topology.yaml b/jstorm-utility/transaction_meta_spout/conf/topology.yaml
deleted file mode 100644
index 8ebe492..0000000
--- a/jstorm-utility/transaction_meta_spout/conf/topology.yaml
+++ /dev/null
@@ -1,21 +0,0 @@
-
-meta.consumer.group: "S_BATCH_META_TEST"
-meta.topic: "jae_log"
-meta.subexpress: "*"
-meta.nameserver: null
-#type "yyyyMMddhhmmss"
-#meta.start.timestamp: "20140701000000"
-meta.start.timestamp: null
-meta.batch.message.num: 1024
-meta.max.fail.times: 10
-meta.rebalance.frequency.sec: 60
-
-
-storm.cluster.mode: "local"
-#storm.cluster.mode: "distributed"
-
-topology.name: "batch_test"
-topology.spout.parallel: 1
-topology.bolt.parallel: 1
-topology.message.timeout.secs: 300
-topology.workers: 10
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/pom.xml
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/pom.xml b/jstorm-utility/transaction_meta_spout/pom.xml
deleted file mode 100644
index fd0e1c7..0000000
--- a/jstorm-utility/transaction_meta_spout/pom.xml
+++ /dev/null
@@ -1,68 +0,0 @@
-<?xml version="1.0"?>
-<project xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd" xmlns="http://maven.apache.org/POM/4.0.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
- <modelVersion>4.0.0</modelVersion>
- <parent>
- <groupId>com.alibaba.aloha</groupId>
- <artifactId>aloha-utility</artifactId>
- <version>0.0.1-SNAPSHOT</version>
- </parent>
- <artifactId>transaction_meta_spout</artifactId>
-
- <build>
- <plugins>
- <plugin>
- <artifactId>maven-assembly-plugin</artifactId>
- <configuration>
- <descriptorRefs>
- <descriptorRef>jar-with-dependencies</descriptorRef>
- </descriptorRefs>
- </configuration>
- <executions>
- <execution>
- <id>make-assembly</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- </execution>
- </executions>
- </plugin>
- <plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-compiler-plugin</artifactId>
- <configuration>
- <source>1.6</source>
- <target>1.6</target>
- </configuration>
- </plugin>
- </plugins>
- </build>
-
- <dependencies>
- <dependency>
- <groupId>com.taobao.metaq.final</groupId>
- <artifactId>metaq-client</artifactId>
- <version>3.1.3</version>
- </dependency>
- <!--
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-common</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-client</artifactId>
- <version>3.0.1</version>
- </dependency>
- <dependency>
- <groupId>com.alibaba.rocketmq</groupId>
- <artifactId>rocketmq-remoting</artifactId>
- <version>3.0.1</version>
- </dependency>
- -->
-
- </dependencies>
-
-</project>
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaRebalance.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaRebalance.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaRebalance.java
deleted file mode 100644
index bdab899..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaRebalance.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package com.alibaba.jstorm.batch.example;
-
-import java.util.Map;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.IPostCommit;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-public class BatchMetaRebalance implements IBasicBolt, IPostCommit {
- /** */
- private static final long serialVersionUID = 2991323223385556163L;
-
- private static final Logger LOG = Logger
- .getLogger(BatchMetaRebalance.class);
-
- public static final String BOLT_NAME = BatchMetaRebalance.class
- .getSimpleName();
-
- public static final String REBALANCE_STREAM_ID = "rebalance";
-
- private transient AtomicBoolean isNeedRebalance;
- private transient ScheduledExecutorService scheduExec;
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
- isNeedRebalance = new AtomicBoolean(false);
- CheckRebalanceTimer timer = new CheckRebalanceTimer();
-
- int rebalanceTimeInterval = JStormUtils.parseInt(
- stormConf.get("meta.rebalance.frequency.sec"), 3600);
-
- long now = System.currentTimeMillis();
- long next = (now/(rebalanceTimeInterval * 1000) + 1) * rebalanceTimeInterval * 1000;
- long diff = (next - now )/1000;
-
-// Calendar start = Calendar.getInstance();
-//
-// start.add(Calendar.HOUR_OF_DAY, 1);
-//
-// start.set(Calendar.MINUTE, 30);
-// start.set(Calendar.SECOND, 0);
-// start.set(Calendar.MILLISECOND, 0);
-// long startMillis = start.getTimeInMillis();
-//
-// long now = System.currentTimeMillis();
-//
-// long diff = (startMillis - now) / (1000);
-
- // use the transient scheduExec field (not a shadowing local) so cleanup() can shut it down
-
- scheduExec = Executors.newSingleThreadScheduledExecutor();
- scheduExec.scheduleAtFixedRate(timer, diff, rebalanceTimeInterval,
- TimeUnit.SECONDS);
-
- LOG.info("Successfully init rebalance timer");
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- LOG.warn("Received unknown message");
- }
-
- @Override
- public void cleanup() {
- scheduExec.shutdown();
- LOG.info("Successfully do cleanup");
- }
-
- @Override
- public void postCommit(BatchId id, BasicOutputCollector collector) {
- if (isNeedRebalance.get() == true) {
- isNeedRebalance.set(false);
- collector.emit(REBALANCE_STREAM_ID, new Values(id));
- LOG.info("Emit rebalance command");
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(REBALANCE_STREAM_ID, new Fields("BatchId"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
- public class CheckRebalanceTimer implements Runnable {
- public void run() {
- BatchMetaRebalance.this.isNeedRebalance.set(true);
- }
- }
-
-}
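
The prepare() method above aligns the first timer run to the next whole multiple of the rebalance interval. The same delay calculation in isolation, as an illustrative sketch:

    public class RebalanceDelaySketch {
        public static void main(String[] args) {
            int rebalanceTimeInterval = 60; // seconds, e.g. from meta.rebalance.frequency.sec
            long now = System.currentTimeMillis();

            // next wall-clock instant that is a whole multiple of the interval
            long next = (now / (rebalanceTimeInterval * 1000L) + 1) * rebalanceTimeInterval * 1000L;
            long initialDelaySec = (next - now) / 1000;

            System.out.println("first check in " + initialDelaySec
                    + "s, then every " + rebalanceTimeInterval + "s");
        }
    }
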
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaSpout.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaSpout.java
deleted file mode 100644
index 8effd5b..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaSpout.java
+++ /dev/null
@@ -1,131 +0,0 @@
-package com.alibaba.jstorm.batch.example;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.IBatchSpout;
-import com.alibaba.jstorm.batch.meta.MetaSimpleClient;
-import com.alibaba.jstorm.batch.meta.MetaSpoutConfig;
-import com.alibaba.jstorm.batch.util.BatchCommon;
-import com.alibaba.jstorm.cluster.ClusterState;
-import com.alibaba.rocketmq.common.message.MessageExt;
-
-public class BatchMetaSpout implements IBatchSpout{
- private static final long serialVersionUID = 5720810158625748041L;
-
- private static final Logger LOG = Logger.getLogger(BatchMetaSpout.class);
-
- public static final String SPOUT_NAME = BatchMetaSpout.class.getSimpleName();
-
- private Map conf;
-
- private String taskName;
- private int taskIndex;
- private int taskParallel;
-
- private transient MetaSimpleClient metaClient;
- private MetaSpoutConfig metaSpoutConfig;
-
-
-
- public BatchMetaSpout(MetaSpoutConfig metaSpoutConfig) {
- this.metaSpoutConfig = metaSpoutConfig;
- }
-
- public void initMetaClient() throws Exception {
- ClusterState zkClient = BatchCommon.getZkClient(conf);
- metaClient = new MetaSimpleClient(metaSpoutConfig, zkClient, taskIndex,
- taskParallel);
-
- metaClient.init();
-
- LOG.info("Successfully init meta client " + taskName);
- }
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
- this.conf = stormConf;
-
- taskName = context.getThisComponentId() + "_" + context.getThisTaskId();
-
- taskIndex = context.getThisTaskIndex();
-
- taskParallel = context.getComponentTasks(context.getThisComponentId())
- .size();
-
- try {
- initMetaClient();
- } catch (Exception e) {
- LOG.info("Failed to init Meta Client,", e);
- throw new RuntimeException(e);
- }
-
- LOG.info(taskName + " successfully do prepare ");
- }
-
- public void emitBatch(BatchId batchId, BasicOutputCollector collector) {
- List<MessageExt> msgs = metaClient.fetchOneBatch();
- for (MessageExt msgExt : msgs) {
- collector.emit(new Values(batchId, msgExt));
- }
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
-
- String streamId = input.getSourceStreamId();
- if (streamId.equals(BatchMetaRebalance.REBALANCE_STREAM_ID)) {
- try {
- metaClient.rebalanceMqList();
- } catch (Exception e) {
- LOG.warn("Failed to do rebalance operation", e);
- }
- }else {
- BatchId batchId = (BatchId) input.getValue(0);
-
- emitBatch(batchId, collector);
- }
-
- }
-
- @Override
- public void cleanup() {
- metaClient.cleanup();
-
- }
-
- @Override
- public byte[] commit(BatchId id) throws FailedException {
- return metaClient.commit(id);
- }
-
- @Override
- public void revert(BatchId id, byte[] commitResult) {
- metaClient.revert(id, commitResult);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("BatchId", "MessageExt"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaTopology.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaTopology.java
deleted file mode 100644
index 63c017d..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/BatchMetaTopology.java
+++ /dev/null
@@ -1,163 +0,0 @@
-package com.alibaba.jstorm.batch.example;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.util.Date;
-import java.util.Map;
-
-import org.yaml.snakeyaml.Yaml;
-
-import backtype.storm.Config;
-import backtype.storm.StormSubmitter;
-import backtype.storm.generated.AlreadyAliveException;
-import backtype.storm.generated.InvalidTopologyException;
-import backtype.storm.generated.TopologyAssignException;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.TopologyBuilder;
-
-import com.alibaba.jstorm.batch.BatchTopologyBuilder;
-import com.alibaba.jstorm.batch.IBatchSpout;
-import com.alibaba.jstorm.batch.meta.MetaSpoutConfig;
-import com.alibaba.jstorm.cluster.StormConfig;
-import com.alibaba.jstorm.local.LocalCluster;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.jstorm.utils.TimeFormat;
-
-public class BatchMetaTopology {
- private static String topologyName;
-
- private static Map conf;
-
- private static void LoadYaml(String confPath) {
-
- Yaml yaml = new Yaml();
-
- try {
- InputStream stream = new FileInputStream(confPath);
-
- conf = (Map) yaml.load(stream);
- if (conf == null || conf.isEmpty() == true) {
- throw new RuntimeException("Failed to read config file");
- }
-
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + confPath);
- throw new RuntimeException("No config file");
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException("Failed to read config file");
- }
-
- topologyName = (String) conf.get(Config.TOPOLOGY_NAME);
- return;
- }
-
- public static MetaSpoutConfig getMetaSpoutConfig(Map conf) {
- String consumerGroup = (String) conf.get("meta.consumer.group");
- String topic = (String) conf.get("meta.topic");
- String nameServer = (String) conf.get("meta.nameserver");
- String subExpress = (String) conf.get("meta.subexpress");
-
- String startTimestampStr = (String) conf.get("meta.start.timestamp");
- Long startTimeStamp = null;
- if (startTimestampStr != null) {
- Date date = TimeFormat.getSecond(startTimestampStr);
- startTimeStamp = date.getTime();
- }
-
- int batchMessageNum = JStormUtils.parseInt(
- conf.get("meta.batch.message.num"), 1024);
- int maxFailTimes = JStormUtils.parseInt(
- conf.get("meta.max.fail.times"), 10);
-
- MetaSpoutConfig ret = new MetaSpoutConfig(consumerGroup, nameServer,
- topic, subExpress);
- ret.setStartTimeStamp(startTimeStamp);
- ret.setBatchMsgNum(batchMessageNum);
- ret.setMaxFailTimes(maxFailTimes);
-
- return ret;
- }
-
- public static TopologyBuilder SetBuilder() {
-
- BatchTopologyBuilder batchTopologyBuilder = new BatchTopologyBuilder(
- topologyName);
-
- MetaSpoutConfig metaSpoutConfig = getMetaSpoutConfig(conf);
-
- BoltDeclarer rebalanceDeclarer = batchTopologyBuilder.setBolt(
- BatchMetaRebalance.BOLT_NAME, new BatchMetaRebalance(), 1);
-
- IBatchSpout batchSpout = new BatchMetaSpout(metaSpoutConfig);
- int spoutParal = JStormUtils.parseInt(
- conf.get("topology.spout.parallel"), 1);
- BoltDeclarer spoutDeclarer = batchTopologyBuilder.setSpout(
- BatchMetaSpout.SPOUT_NAME, batchSpout, spoutParal);
- spoutDeclarer.allGrouping(BatchMetaRebalance.BOLT_NAME,
- BatchMetaRebalance.REBALANCE_STREAM_ID);
-
- int boltParallel = JStormUtils.parseInt(
- conf.get("topology.bolt.parallel"), 1);
- BoltDeclarer transformDeclarer = batchTopologyBuilder.setBolt(
- TransformBolt.BOLT_NAME, new TransformBolt(), boltParallel);
- transformDeclarer.shuffleGrouping(BatchMetaSpout.SPOUT_NAME);
-
- BoltDeclarer countDeclarer = batchTopologyBuilder.setBolt(
- CountBolt.COUNT_BOLT_NAME, new CountBolt(), boltParallel);
- countDeclarer.shuffleGrouping(TransformBolt.BOLT_NAME);
-
- BoltDeclarer sumDeclarer = batchTopologyBuilder.setBolt(
- CountBolt.SUM_BOLT_NAME, new CountBolt(), boltParallel);
- sumDeclarer.shuffleGrouping(TransformBolt.BOLT_NAME);
-
- BoltDeclarer dbDeclarer = batchTopologyBuilder.setBolt(
- DBBolt.BOLT_NAME, new DBBolt(), 1);
- dbDeclarer.shuffleGrouping(CountBolt.COUNT_BOLT_NAME).shuffleGrouping(
- CountBolt.SUM_BOLT_NAME);
-
- return batchTopologyBuilder.getTopologyBuilder();
- }
-
- public static void SetLocalTopology() throws Exception {
- TopologyBuilder builder = SetBuilder();
-
- LocalCluster cluster = new LocalCluster();
- cluster.submitTopology(topologyName, conf, builder.createTopology());
-
- Thread.sleep(600000);
-
- cluster.shutdown();
- }
-
- public static void SetRemoteTopology() throws AlreadyAliveException,
- InvalidTopologyException, TopologyAssignException {
-
- TopologyBuilder builder = SetBuilder();
-
- StormSubmitter.submitTopology(topologyName, conf,
- builder.createTopology());
-
- }
-
- public static void main(String[] args) throws Exception {
-
- if (args.length < 1) {
- System.err.println("Usage: please provide the path to topology.yaml");
- System.exit(-1);
- }
-
- LoadYaml(args[0]);
-
- boolean isLocal = StormConfig.local_mode(conf);
-
- if (isLocal) {
- SetLocalTopology();
- return;
- } else {
- SetRemoteTopology();
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/CountBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/CountBolt.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/CountBolt.java
deleted file mode 100644
index eceb4b9..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/CountBolt.java
+++ /dev/null
@@ -1,84 +0,0 @@
-package com.alibaba.jstorm.batch.example;
-
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.TimeCacheMap;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.IPrepareCommit;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-public class CountBolt implements IBasicBolt, IPrepareCommit {
- private static final long serialVersionUID = 5720810158625748042L;
-
- private static final Logger LOG = Logger.getLogger(CountBolt.class);
-
- public static final String COUNT_BOLT_NAME = "Count";
- public static final String SUM_BOLT_NAME = "Sum";
-
- private Map conf;
-
- private TimeCacheMap<BatchId, AtomicLong> counters;
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
- this.conf = stormConf;
-
- int timeoutSeconds = JStormUtils.parseInt(
- conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS), 30);
- counters = new TimeCacheMap<BatchId, AtomicLong>(timeoutSeconds);
-
- LOG.info("Successfully do prepare");
-
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- BatchId id = (BatchId) input.getValue(0);
-
- AtomicLong counter = counters.get(id);
- if (counter == null) {
- counter = new AtomicLong(0);
- counters.put(id, counter);
- }
-
- counter.incrementAndGet();
-
- }
-
- @Override
- public void cleanup() {
- LOG.info("Successfully do cleanup");
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("BatchId", "counters"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
- @Override
- public void prepareCommit(BatchId id, BasicOutputCollector collector) {
- AtomicLong counter = (AtomicLong) counters.remove(id);
- if (counter == null) {
- counter = new AtomicLong(0);
- }
-
- collector.emit(new Values(id, counter.get()));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/DBBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/DBBolt.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/DBBolt.java
deleted file mode 100644
index bf9aac5..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/DBBolt.java
+++ /dev/null
@@ -1,261 +0,0 @@
-package com.alibaba.jstorm.batch.example;
-
-import java.io.Serializable;
-import java.util.Map;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.concurrent.atomic.AtomicLong;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-import org.apache.log4j.Logger;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.TimeCacheMap;
-import backtype.storm.utils.Utils;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.ICommitter;
-import com.alibaba.jstorm.utils.JStormUtils;
-
-public class DBBolt implements IBasicBolt, ICommitter, Runnable {
- private static final long serialVersionUID = 5720810158625748043L;
-
- private static final Logger LOG = Logger.getLogger(DBBolt.class);
-
- public static final String BOLT_NAME = DBBolt.class.getSimpleName();
-
- private TimeCacheMap<BatchId, AtomicLong> counters;
- private TimeCacheMap<BatchId, AtomicLong> sums;
-
- private Map conf;
-
- private LinkedBlockingQueue<CommitedValue> revertQueue;
- private Thread revertThread;
- private AtomicBoolean isRunRevert;
-
- public void initRevert() {
- revertQueue = new LinkedBlockingQueue<DBBolt.CommitedValue>();
- isRunRevert = new AtomicBoolean(false);
- revertThread = new Thread(this);
- revertThread.start();
-
-        LOG.info("Successfully initialized revert thread");
-
- }
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
- this.conf = stormConf;
-
- int timeoutSeconds = JStormUtils.parseInt(
- conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS), 30);
-
- counters = new TimeCacheMap<BatchId, AtomicLong>(timeoutSeconds);
- sums = new TimeCacheMap<BatchId, AtomicLong>(timeoutSeconds);
-
- initRevert();
-
-        LOG.info("Successfully prepared");
-
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- BatchId batchId = (BatchId) input.getValue(0);
-
- if (input.getSourceComponent().equals(CountBolt.COUNT_BOLT_NAME)) {
- AtomicLong counter = counters.get(batchId);
- if (counter == null) {
- counter = new AtomicLong(0);
- counters.put(batchId, counter);
- }
- long value = input.getLong(1);
-
- counter.addAndGet(value);
-
- } else if (input.getSourceComponent().equals(CountBolt.SUM_BOLT_NAME)) {
- AtomicLong sum = sums.get(batchId);
- if (sum == null) {
- sum = new AtomicLong(0);
- sums.put(batchId, sum);
- }
- long value = input.getLong(1);
-
- sum.addAndGet(value);
- } else {
-            LOG.warn("Unknown source type");
- }
- }
-
- @Override
- public void cleanup() {
- LOG.info("Begin to exit ");
- isRunRevert.set(false);
- try {
- revertThread.join();
- } catch (InterruptedException e) {
-
- }
-
- LOG.info("Successfully exit ");
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
- @Override
- public byte[] commit(BatchId id) throws FailedException {
- AtomicLong count = (AtomicLong) counters.remove(id);
- if (count == null) {
- count = new AtomicLong(0);
- }
-
- AtomicLong sum = (AtomicLong) sums.remove(id);
- if (sum == null) {
- sum = new AtomicLong(0);
- }
-
- CommitedValue commitedValue = new CommitedValue(count, sum);
-
- try {
-
- commitedValue.commit();
-
- return Utils.serialize(commitedValue);
- } catch (Exception e) {
- LOG.error("Failed to commit " + commitedValue, e);
- throw new FailedException(e);
- }
-
- }
-
- @Override
- public void revert(BatchId id, byte[] commitResult) {
- CommitedValue commitedValue = (CommitedValue) Utils
- .deserialize(commitResult);
- if (commitedValue == null) {
-            LOG.warn("Failed to deserialize committed value");
- return;
- }
-
- revertQueue.offer(commitedValue);
- }
-
- @Override
- public void run() {
- LOG.info("Begin to run revert thread");
- isRunRevert.set(true);
-
- while (isRunRevert.get() == true) {
- CommitedValue commitedValue = null;
- try {
- commitedValue = revertQueue.take();
- } catch (InterruptedException e1) {
- }
- if (commitedValue == null) {
- continue;
- }
-
- try {
- commitedValue.revert();
-                LOG.info("Successfully reverted " + commitedValue);
-            } catch (Exception e) {
-                LOG.warn("Failed to revert " + commitedValue, e);
- JStormUtils.sleepMs(100);
- revertQueue.offer(commitedValue);
- }
- }
-
- LOG.info("Successfully quit revert thread");
- }
-
- public static class CommitedValue implements Serializable {
- private static final long serialVersionUID = 5720810158625748047L;
-
- private final AtomicLong counter;
- private final AtomicLong sum;
-
- private static AtomicLong dbCounter = new AtomicLong(0);
- private static AtomicLong dbSum = new AtomicLong(0);
-
- public CommitedValue(AtomicLong counter, AtomicLong sum) {
- this.counter = counter;
- this.sum = sum;
- }
-
- public void commit() throws Exception {
- boolean doCount = false;
- boolean doSum = false;
- try {
- long countValue = dbCounter.addAndGet(counter.get());
- doCount = true;
- long sumValue = dbSum.addAndGet(sum.get());
- doSum = true;
-
-                LOG.info("Successfully committed " + this);
- LOG.info("DB counter:" + countValue + ", sum:" + sumValue);
- } catch (Exception e) {
- if (doSum) {
- dbSum.addAndGet(-sum.get());
- LOG.info("Revert sum, " + sum.get());
- }
-
- if (doCount) {
- dbCounter.addAndGet(-counter.get());
- LOG.info("Revert count," + counter.get());
- }
-
- LOG.warn(e.getCause(), e);
- throw e;
- }
-
- }
-
- public void revert() throws Exception {
- boolean doCount = false;
- boolean doSum = false;
- try {
- long countValue = dbCounter.addAndGet(-counter.get());
- doCount = true;
- long sumValue = dbSum.addAndGet(-sum.get());
- doSum = true;
-
-                LOG.info("Successfully reverted " + this);
- LOG.info("DB counter:" + countValue + ", sum:" + sumValue);
- } catch (Exception e) {
- if (doSum) {
- dbSum.addAndGet(sum.get());
- LOG.info("Revert sum, " + sum.get());
- }
-
- if (doCount) {
- dbCounter.addAndGet(counter.get());
- LOG.info("Revert count," + counter.get());
- }
-
- LOG.warn(e.getCause(), e);
- throw e;
- }
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
- }
-
-}
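
DBBolt.CommitedValue treats commit and revert as inverse updates and, inside each, undoes any half-applied change before rethrowing. The same compensation pattern, stripped of the Storm types (all names below are hypothetical):

import java.util.concurrent.atomic.AtomicLong;

// Hypothetical sketch of the commit/revert compensation pattern used by
// DBBolt.CommitedValue: any mutation applied before a failure is undone
// before the exception propagates.
public class CompensatingUpdate {
    private static final AtomicLong dbCounter = new AtomicLong(0);
    private static final AtomicLong dbSum = new AtomicLong(0);

    private final long countDelta;
    private final long sumDelta;

    public CompensatingUpdate(long countDelta, long sumDelta) {
        this.countDelta = countDelta;
        this.sumDelta = sumDelta;
    }

    public void commit() {
        apply(countDelta, sumDelta);
    }

    // revert is commit with negated deltas, so one helper serves both paths.
    public void revert() {
        apply(-countDelta, -sumDelta);
    }

    private void apply(long count, long sum) {
        boolean countApplied = false;
        try {
            dbCounter.addAndGet(count);
            countApplied = true;
            dbSum.addAndGet(sum);
        } catch (RuntimeException e) {
            if (countApplied) {
                dbCounter.addAndGet(-count); // undo the half-applied update
            }
            throw e;
        }
    }
}
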
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/TransformBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/TransformBolt.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/TransformBolt.java
deleted file mode 100644
index fe7c690..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/example/TransformBolt.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package com.alibaba.jstorm.batch.example;
-
-import java.util.Map;
-import java.util.Random;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-
-import com.alibaba.jstorm.batch.BatchId;
-
-public class TransformBolt implements IBasicBolt {
- private static final long serialVersionUID = 5720810158625748044L;
-
- private static final Logger LOG = Logger.getLogger(TransformBolt.class);
-
- public static final String BOLT_NAME = TransformBolt.class.getSimpleName();
-
- private Map conf;
-
- private Random rand;
-
- @Override
- public void prepare(Map stormConf, TopologyContext context) {
- this.conf = stormConf;
- this.rand = new Random();
- rand.setSeed(System.currentTimeMillis());
-
-        LOG.info("Successfully prepared");
- }
-
- @Override
- public void execute(Tuple input, BasicOutputCollector collector) {
- BatchId id = (BatchId) input.getValue(0);
-
- long value = rand.nextInt(100);
-
- collector.emit(new Values(id, value));
-
- }
-
- @Override
- public void cleanup() {
-        LOG.info("Successfully cleaned up");
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("BatchId", "Value"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSimpleClient.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSimpleClient.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSimpleClient.java
deleted file mode 100644
index ee908d1..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSimpleClient.java
+++ /dev/null
@@ -1,340 +0,0 @@
-package com.alibaba.jstorm.batch.meta;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.topology.FailedException;
-
-import com.alibaba.jstorm.batch.BatchId;
-import com.alibaba.jstorm.batch.ICommitter;
-import com.alibaba.jstorm.batch.util.BatchDef;
-import com.alibaba.jstorm.cluster.ClusterState;
-import com.alibaba.jstorm.utils.JStormUtils;
-import com.alibaba.rocketmq.client.consumer.PullResult;
-import com.alibaba.rocketmq.client.consumer.PullStatus;
-import com.alibaba.rocketmq.client.exception.MQClientException;
-import com.alibaba.rocketmq.common.message.MessageExt;
-import com.alibaba.rocketmq.common.message.MessageQueue;
-import com.taobao.metaq.client.MetaPullConsumer;
-
-public class MetaSimpleClient implements ICommitter{
- private static final Logger LOG = Logger.getLogger(MetaSimpleClient.class);
-
- private final MetaSpoutConfig metaSpoutConfig;
-
- private final int taskIndex;
- private final int taskParallel;
- private int oneQueueFetchSize;
-
- private Map<MessageQueue, Long> currentOffsets;
- private Map<MessageQueue, Long> frontOffsets;
- private Map<MessageQueue, Long> backendOffset;
-
- private MetaPullConsumer consumer;
-
- private static String nameServer;
-
- private final ClusterState zkClient;
-
- public MetaSimpleClient(MetaSpoutConfig config,
- ClusterState zkClient,
- int taskIndex,
- int taskParall) {
- this.metaSpoutConfig = config;
- this.zkClient = zkClient;
- this.taskIndex = taskIndex;
- this.taskParallel = taskParall;
- }
-
- public void initMetaConsumer() throws MQClientException {
- LOG.info("MetaSpoutConfig:" + metaSpoutConfig);
-
- consumer = new MetaPullConsumer(metaSpoutConfig.getConsumerGroup());
- consumer.setInstanceName(taskIndex + "." + JStormUtils.process_pid());
-
- if (metaSpoutConfig.getNameServer() != null) {
- // this is for alipay
- if (nameServer == null) {
- nameServer = metaSpoutConfig.getNameServer();
-
- System.setProperty("rocketmq.namesrv.domain",
- metaSpoutConfig.getNameServer());
- }else if (metaSpoutConfig.getNameServer().equals(nameServer) == false) {
- throw new RuntimeException("Different nameserver address in the same worker "
- + nameServer + ":" + metaSpoutConfig.getNameServer());
-
- }
- }
-
- consumer.start();
-
- LOG.info("Successfully start meta consumer");
- }
-
- public int getOneQueueFetchSize() {
- int batchSize = metaSpoutConfig.getBatchMsgNum();
-
- int oneFetchSize = batchSize/(taskParallel * currentOffsets.size());
- if (oneFetchSize <= 0) {
- oneFetchSize = 1;
- }
-
- LOG.info("One queue fetch size:" + oneFetchSize);
- return oneFetchSize;
- }
-
- public void init() throws Exception {
- initMetaConsumer();
-
- frontOffsets = initOffset();
- currentOffsets = frontOffsets;
- backendOffset = new HashMap<MessageQueue, Long>();
- backendOffset.putAll(frontOffsets);
-
- oneQueueFetchSize = getOneQueueFetchSize();
- }
-
-
- protected Set<MessageQueue> getMQ() throws MQClientException {
- Set<MessageQueue> mqs = consumer.
- fetchSubscribeMessageQueues(metaSpoutConfig.getTopic());
- if (taskParallel > mqs.size()) {
-            throw new RuntimeException("More spout tasks than message queues: " + metaSpoutConfig);
- }
-
- List<MessageQueue> mqList = JStormUtils.mk_list(mqs);
- Set<MessageQueue> ret = new HashSet<MessageQueue>();
- for (int i = taskIndex; i < mqList.size(); i += taskParallel) {
- ret.add(mqList.get(i));
- }
-
- if (ret.size() == 0) {
-            throw new RuntimeException("No meta queue needs to be consumed");
- }
- return ret;
- }
-
- protected Map<MessageQueue, Long> initOffset() throws MQClientException {
- Set<MessageQueue> queues = getMQ();
-
- Map<MessageQueue, Long> ret = new HashMap<MessageQueue, Long>();
- Set<MessageQueue> noOffsetQueues = new HashSet<MessageQueue>();
-
-
- if (metaSpoutConfig.getStartTimeStamp() != null) {
- Long timeStamp = metaSpoutConfig.getStartTimeStamp();
-
-
- for (MessageQueue mq : queues) {
- long offset = consumer.searchOffset(mq, timeStamp);
- if (offset >= 0) {
- LOG.info("Successfully get " + mq + " offset of timestamp " + new Date(timeStamp));
- ret.put(mq, offset);
- }else {
- LOG.info("Failed to get " + mq + " offset of timestamp " + new Date(timeStamp));
- noOffsetQueues.add(mq);
- }
- }
- }else {
- noOffsetQueues.addAll(queues);
- }
-
- if (noOffsetQueues.size() == 0) {
- return ret;
- }
-
- for (MessageQueue mq : noOffsetQueues) {
- long offset = getOffsetFromZk(mq);
-
- ret.put(mq, offset);
- }
-
- return ret;
- }
-
-
- protected String getZkPath(MessageQueue mq) {
- StringBuffer sb = new StringBuffer();
- sb.append(BatchDef.ZK_SEPERATOR);
- sb.append(metaSpoutConfig.getConsumerGroup());
- sb.append(BatchDef.ZK_SEPERATOR);
- sb.append(mq.getBrokerName());
- sb.append(BatchDef.ZK_SEPERATOR);
- sb.append(mq.getQueueId());
-
- return sb.toString();
- }
-
- protected long getOffsetFromZk(MessageQueue mq) {
- String path = getZkPath(mq);
-
- try {
- if (zkClient.node_existed(path, false) == false) {
- LOG.info("No zk node of " + path);
- return 0;
- }
-
- byte[] data = zkClient.get_data(path, false);
- String value = new String(data);
-
- long ret = Long.valueOf(value);
- return ret;
- }catch (Exception e) {
- LOG.warn("Failed to get offset,", e);
- return 0;
- }
- }
-
- protected void updateOffsetToZk(MessageQueue mq, Long offset) throws Exception {
- String path = getZkPath(mq);
- byte[] data = String.valueOf(offset).getBytes();
- zkClient.set_data(path, data);
-
- }
-
- protected void updateOffsetToZk(Map<MessageQueue, Long> mqs) throws Exception{
- for (Entry<MessageQueue, Long> entry : mqs.entrySet()) {
- MessageQueue mq = entry.getKey();
- Long offset = entry.getValue();
-
- updateOffsetToZk(mq, offset);
- }
-
- LOG.info("Update zk offset," + mqs);
- }
-
- protected void switchOffsetMap() {
- Map<MessageQueue, Long> tmp = frontOffsets;
-
- frontOffsets = backendOffset;
- backendOffset = tmp;
-
- currentOffsets = frontOffsets;
- }
-
- @Override
- public byte[] commit(BatchId id) throws FailedException {
- try {
- updateOffsetToZk(currentOffsets);
- switchOffsetMap();
- }catch(Exception e) {
- LOG.warn("Failed to update offset to ZK", e);
- throw new FailedException(e);
- }
-
- return null;
- }
-
- @Override
- public void revert(BatchId id, byte[] commitResult) {
- try {
- switchOffsetMap();
- updateOffsetToZk(currentOffsets);
-
- }catch(Exception e) {
- LOG.warn("Failed to update offset to ZK", e);
- throw new FailedException(e);
- }
- }
-
- /**
- * rebalanceMqList must run after commit
- *
- * @throws MQClientException
- */
- public void rebalanceMqList() throws Exception {
- LOG.info("Begin to do rebalance operation");
- Set<MessageQueue> newMqs = getMQ();
-
- Set<MessageQueue> oldMqs = currentOffsets.keySet();
-
- if (oldMqs.equals(newMqs) == true) {
- LOG.info("No change of meta queues " + newMqs);
- return ;
- }
-
- Set<MessageQueue> removeMqs = new HashSet<MessageQueue>();
- removeMqs.addAll(oldMqs);
- removeMqs.removeAll(newMqs);
-
- Set<MessageQueue> addMqs = new HashSet<MessageQueue>();
- addMqs.addAll(newMqs);
- addMqs.removeAll(oldMqs);
-
- LOG.info("Remove " + removeMqs);
- for (MessageQueue mq : removeMqs) {
- Long offset = frontOffsets.remove(mq);
- updateOffsetToZk(mq, offset);
-
- backendOffset.remove(mq);
-
- }
-
- LOG.info("Add " + addMqs);
- for (MessageQueue mq : addMqs) {
- long offset = getOffsetFromZk(mq);
- frontOffsets.put(mq, offset);
- backendOffset.put(mq, offset);
- }
- }
-
- public List<MessageExt> fetchOneBatch() {
- List<MessageExt> ret = new ArrayList<MessageExt>();
-
-
- String subexpress = metaSpoutConfig.getSubExpress();
- for(Entry<MessageQueue, Long>entry : currentOffsets.entrySet()) {
- MessageQueue mq = entry.getKey();
- Long offset = entry.getValue();
-
-
- int fetchSize = 0;
- int oneFetchSize = Math.min(oneQueueFetchSize, 32);
-
- while(fetchSize < oneQueueFetchSize) {
-
- PullResult pullResult = null;
- try {
- pullResult = consumer.pullBlockIfNotFound(mq, subexpress, offset, oneFetchSize);
- offset = pullResult.getNextBeginOffset();
- PullStatus status = pullResult.getPullStatus();
- if (status == PullStatus.FOUND) {
- List<MessageExt> msgList = pullResult.getMsgFoundList();
- ret.addAll(msgList);
- fetchSize += msgList.size();
- continue;
- }else if (status == PullStatus.NO_MATCHED_MSG) {
- continue;
- }else if (status == PullStatus.NO_NEW_MSG ) {
- break;
- }else if (status == PullStatus.OFFSET_ILLEGAL) {
- break;
- }else {
-
- break;
- }
- }
- catch (Exception e) {
- LOG.warn("Failed to fetch messages of " + mq + ":" + offset, e);
- break;
- }
- }
-
- backendOffset.put(mq, offset);
- }
-
- return ret;
- }
-
- public void cleanup() {
- consumer.shutdown();
- }
-}
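
MetaSimpleClient keeps two offset maps and flips them on every batch boundary: fetches only advance the backend map, commit() persists the currently exposed map to ZooKeeper and then swaps, and revert() swaps back before persisting again. A self-contained sketch of that double-buffered bookkeeping, with String standing in for MessageQueue and a plain map standing in for ZooKeeper (all names hypothetical):

import java.util.HashMap;
import java.util.Map;

// Hypothetical sketch of the front/backend offset double-buffering used by
// MetaSimpleClient; "store" stands in for updateOffsetToZk.
public class OffsetBuffer {
    private Map<String, Long> front = new HashMap<String, Long>();
    private Map<String, Long> backend = new HashMap<String, Long>();
    private Map<String, Long> current = front;
    private final Map<String, Long> store = new HashMap<String, Long>(); // pretend ZK

    // Fetches only ever touch the backend map, never the committed view.
    public void recordFetch(String queue, long nextOffset) {
        backend.put(queue, nextOffset);
    }

    public void commit() {
        store.putAll(current); // persist the currently exposed offsets
        swap();                // freshly fetched offsets become the new view
    }

    public void revert() {
        swap();                // fall back to the previous view
        store.putAll(current); // and write it back
    }

    private void swap() {
        Map<String, Long> tmp = front;
        front = backend;
        backend = tmp;
        current = front;
    }
}
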
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSpoutConfig.java
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSpoutConfig.java b/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSpoutConfig.java
deleted file mode 100644
index ac25825..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/java/com/alibaba/jstorm/batch/meta/MetaSpoutConfig.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package com.alibaba.jstorm.batch.meta;
-
-import java.io.Serializable;
-import java.util.Properties;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.commons.lang.builder.ToStringStyle;
-
-/**
- * Meta spout settings.
- *
- * All required configuration must be prepared before the topology is submitted.
- *
- * @author longda
- */
-public class MetaSpoutConfig implements Serializable {
-
- private static final long serialVersionUID = 4157424979688593280L;
-
- private final String consumerGroup;
-
- /**
-     * Alipay needs to set nameServer; Taobao does not need to set this field.
- */
- private final String nameServer;
-
- private final String topic;
-
- private final String subExpress;
-
- /**
-     * The maximum allowed failures for a single message; the message is
-     * skipped once this limit is exceeded.
-     *
-     * -1 means retry until success.
- */
- private int maxFailTimes = DEFAULT_FAIL_TIME;
- public static final int DEFAULT_FAIL_TIME = 10;
-
- /**
-     * Number of messages in one batch.
- *
- */
- private int batchMsgNum = DEFAULT_BATCH_MSG_NUM;
- public static final int DEFAULT_BATCH_MSG_NUM = 256;
-
- /**
-     * Consumer start timestamp. Null means start from the last consumption
-     * time (CONSUME_FROM_LAST_OFFSET).
- *
- */
- private Long startTimeStamp;
-
- private Properties peroperties;
-
- public MetaSpoutConfig(String consumerGroup, String nameServer,
- String topic, String subExpress) {
- super();
- this.consumerGroup = consumerGroup;
- this.nameServer = nameServer;
- this.topic = topic;
- this.subExpress = subExpress;
- }
-
- public int getMaxFailTimes() {
- return maxFailTimes;
- }
-
- public void setMaxFailTimes(int maxFailTimes) {
- this.maxFailTimes = maxFailTimes;
- }
-
- public Long getStartTimeStamp() {
- return startTimeStamp;
- }
-
- public void setStartTimeStamp(Long startTimeStamp) {
- this.startTimeStamp = startTimeStamp;
- }
-
- public Properties getPeroperties() {
- return peroperties;
- }
-
- public void setPeroperties(Properties peroperties) {
- this.peroperties = peroperties;
- }
-
- public String getConsumerGroup() {
- return consumerGroup;
- }
-
- public String getNameServer() {
- return nameServer;
- }
-
- public String getTopic() {
- return topic;
- }
-
- public String getSubExpress() {
- return subExpress;
- }
-
- public int getBatchMsgNum() {
- return batchMsgNum;
- }
-
- public void setBatchMsgNum(int batchMsgNum) {
- this.batchMsgNum = batchMsgNum;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this,
- ToStringStyle.SHORT_PREFIX_STYLE);
- }
-
-}
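
Per the class comment, every setting must be fixed before the topology is submitted. A hedged construction example using only the constructor and setters defined above; the literal values mirror the sample metaspout.default.prop that follows, and the mapping from property keys to setters is an assumption:

import com.alibaba.jstorm.batch.meta.MetaSpoutConfig;

// Illustrative only: values echo the metaspout.default.prop sample below;
// nameServer is left null for the Taobao case described in the field comment.
public class MetaSpoutConfigExample {
    public static MetaSpoutConfig buildConfig() {
        MetaSpoutConfig config = new MetaSpoutConfig(
                "test1Groupaa",   // consumerGroup (meta.consumer.group)
                null,             // nameServer: only required for Alipay
                "tlog-test",      // topic (meta.topic)
                "*");             // subExpress (meta.subexpress)
        config.setBatchMsgNum(128);     // batch size; meta.fetch.max.msg used here for illustration
        config.setMaxFailTimes(10);     // skip a message after 10 failures
        config.setStartTimeStamp(null); // null: resume from the last consumed position
        return config;
    }
}
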
http://git-wip-us.apache.org/repos/asf/storm/blob/e8f64d5e/jstorm-utility/transaction_meta_spout/src/main/resources/metaspout.default.prop
----------------------------------------------------------------------
diff --git a/jstorm-utility/transaction_meta_spout/src/main/resources/metaspout.default.prop b/jstorm-utility/transaction_meta_spout/src/main/resources/metaspout.default.prop
deleted file mode 100644
index 8b6e45d..0000000
--- a/jstorm-utility/transaction_meta_spout/src/main/resources/metaspout.default.prop
+++ /dev/null
@@ -1,15 +0,0 @@
-meta.consumer.group=test1Groupaa
-meta.topic=tlog-test
-meta.subexpress=*
-meta.fetch.max.msg=128
-
-topology.name=test_meta_spout
-topology.version=1.0.0
-topology.workers=5
-
-topology.spout.pending=100
-topology.spout.parallel=1
-topology.writer.parallel=1
-
-topology.acker.executors=1
-topology.use.transaction=false
[24/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalTopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalTopologyBuilder.java b/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalTopologyBuilder.java
deleted file mode 100644
index b4e437b..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/TransactionalTopologyBuilder.java
+++ /dev/null
@@ -1,566 +0,0 @@
-package backtype.storm.transactional;
-
-import backtype.storm.coordination.IBatchBolt;
-import backtype.storm.coordination.BatchBoltExecutor;
-import backtype.storm.Config;
-import backtype.storm.Constants;
-import backtype.storm.coordination.CoordinatedBolt;
-import backtype.storm.coordination.CoordinatedBolt.IdStreamSpec;
-import backtype.storm.coordination.CoordinatedBolt.SourceArgs;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.topology.BaseConfigurationDeclarer;
-import backtype.storm.topology.BasicBoltExecutor;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.InputDeclarer;
-import backtype.storm.topology.SpoutDeclarer;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.transactional.partitioned.IOpaquePartitionedTransactionalSpout;
-import backtype.storm.transactional.partitioned.IPartitionedTransactionalSpout;
-import backtype.storm.transactional.partitioned.OpaquePartitionedTransactionalSpoutExecutor;
-import backtype.storm.transactional.partitioned.PartitionedTransactionalSpoutExecutor;
-import backtype.storm.tuple.Fields;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Trident subsumes the functionality provided by transactional topologies, so
- * this class is deprecated.
- *
- */
-@Deprecated
-public class TransactionalTopologyBuilder {
- String _id;
- String _spoutId;
- ITransactionalSpout _spout;
- Map<String, Component> _bolts = new HashMap<String, Component>();
- Integer _spoutParallelism;
- List<Map> _spoutConfs = new ArrayList();
-
- // id is used to store the state of this transactionalspout in zookeeper
- // it would be very dangerous to have 2 topologies active with the same id
- // in the same cluster
- public TransactionalTopologyBuilder(String id, String spoutId,
- ITransactionalSpout spout, Number spoutParallelism) {
- _id = id;
- _spoutId = spoutId;
- _spout = spout;
- _spoutParallelism = (spoutParallelism == null) ? null
- : spoutParallelism.intValue();
- }
-
- public TransactionalTopologyBuilder(String id, String spoutId,
- ITransactionalSpout spout) {
- this(id, spoutId, spout, null);
- }
-
- public TransactionalTopologyBuilder(String id, String spoutId,
- IPartitionedTransactionalSpout spout, Number spoutParallelism) {
- this(id, spoutId, new PartitionedTransactionalSpoutExecutor(spout),
- spoutParallelism);
- }
-
- public TransactionalTopologyBuilder(String id, String spoutId,
- IPartitionedTransactionalSpout spout) {
- this(id, spoutId, spout, null);
- }
-
- public TransactionalTopologyBuilder(String id, String spoutId,
- IOpaquePartitionedTransactionalSpout spout, Number spoutParallelism) {
- this(id, spoutId,
- new OpaquePartitionedTransactionalSpoutExecutor(spout),
- spoutParallelism);
- }
-
- public TransactionalTopologyBuilder(String id, String spoutId,
- IOpaquePartitionedTransactionalSpout spout) {
- this(id, spoutId, spout, null);
- }
-
- public SpoutDeclarer getSpoutDeclarer() {
- return new SpoutDeclarerImpl();
- }
-
- public BoltDeclarer setBolt(String id, IBatchBolt bolt) {
- return setBolt(id, bolt, null);
- }
-
- public BoltDeclarer setBolt(String id, IBatchBolt bolt, Number parallelism) {
- return setBolt(id, new BatchBoltExecutor(bolt), parallelism,
- bolt instanceof ICommitter);
- }
-
- public BoltDeclarer setCommitterBolt(String id, IBatchBolt bolt) {
- return setCommitterBolt(id, bolt, null);
- }
-
- public BoltDeclarer setCommitterBolt(String id, IBatchBolt bolt,
- Number parallelism) {
- return setBolt(id, new BatchBoltExecutor(bolt), parallelism, true);
- }
-
- public BoltDeclarer setBolt(String id, IBasicBolt bolt) {
- return setBolt(id, bolt, null);
- }
-
- public BoltDeclarer setBolt(String id, IBasicBolt bolt, Number parallelism) {
- return setBolt(id, new BasicBoltExecutor(bolt), parallelism, false);
- }
-
- private BoltDeclarer setBolt(String id, IRichBolt bolt, Number parallelism,
- boolean committer) {
- Integer p = null;
- if (parallelism != null)
- p = parallelism.intValue();
- Component component = new Component(bolt, p, committer);
- _bolts.put(id, component);
- return new BoltDeclarerImpl(component);
- }
-
- public TopologyBuilder buildTopologyBuilder() {
- String coordinator = _spoutId + "/coordinator";
- TopologyBuilder builder = new TopologyBuilder();
- SpoutDeclarer declarer = builder.setSpout(coordinator,
- new TransactionalSpoutCoordinator(_spout));
- for (Map conf : _spoutConfs) {
- declarer.addConfigurations(conf);
- }
- declarer.addConfiguration(Config.TOPOLOGY_TRANSACTIONAL_ID, _id);
-
- BoltDeclarer emitterDeclarer = builder
- .setBolt(
- _spoutId,
- new CoordinatedBolt(
- new TransactionalSpoutBatchExecutor(_spout),
- null, null), _spoutParallelism)
- .allGrouping(
- coordinator,
- TransactionalSpoutCoordinator.TRANSACTION_BATCH_STREAM_ID)
- .addConfiguration(Config.TOPOLOGY_TRANSACTIONAL_ID, _id);
- if (_spout instanceof ICommitterTransactionalSpout) {
- emitterDeclarer.allGrouping(coordinator,
- TransactionalSpoutCoordinator.TRANSACTION_COMMIT_STREAM_ID);
- }
- for (String id : _bolts.keySet()) {
- Component component = _bolts.get(id);
- Map<String, SourceArgs> coordinatedArgs = new HashMap<String, SourceArgs>();
- // get all source component
- for (String c : componentBoltSubscriptions(component)) {
- coordinatedArgs.put(c, SourceArgs.all());
- }
-
- IdStreamSpec idSpec = null;
- if (component.committer) {
- idSpec = IdStreamSpec
- .makeDetectSpec(
- coordinator,
- TransactionalSpoutCoordinator.TRANSACTION_COMMIT_STREAM_ID);
- }
- BoltDeclarer input = builder.setBolt(id, new CoordinatedBolt(
- component.bolt, coordinatedArgs, idSpec),
- component.parallelism);
- for (Map conf : component.componentConfs) {
- input.addConfigurations(conf);
- }
- for (String c : componentBoltSubscriptions(component)) {
- input.directGrouping(c, Constants.COORDINATED_STREAM_ID);
- }
- for (InputDeclaration d : component.declarations) {
- d.declare(input);
- }
- if (component.committer) {
- input.allGrouping(
- coordinator,
- TransactionalSpoutCoordinator.TRANSACTION_COMMIT_STREAM_ID);
- }
- }
- return builder;
- }
-
- public StormTopology buildTopology() {
- return buildTopologyBuilder().createTopology();
- }
-
- private Set<String> componentBoltSubscriptions(Component component) {
- Set<String> ret = new HashSet<String>();
- for (InputDeclaration d : component.declarations) {
- ret.add(d.getComponent());
- }
- return ret;
- }
-
- private static class Component {
- public IRichBolt bolt;
- public Integer parallelism;
- public List<InputDeclaration> declarations = new ArrayList<InputDeclaration>();
- public List<Map> componentConfs = new ArrayList<Map>();
- public boolean committer;
-
- public Component(IRichBolt bolt, Integer parallelism, boolean committer) {
- this.bolt = bolt;
- this.parallelism = parallelism;
- this.committer = committer;
- }
- }
-
- private static interface InputDeclaration {
- void declare(InputDeclarer declarer);
-
- String getComponent();
- }
-
- private class SpoutDeclarerImpl extends
- BaseConfigurationDeclarer<SpoutDeclarer> implements SpoutDeclarer {
- @Override
- public SpoutDeclarer addConfigurations(Map conf) {
- _spoutConfs.add(conf);
- return this;
- }
- }
-
- private class BoltDeclarerImpl extends
- BaseConfigurationDeclarer<BoltDeclarer> implements BoltDeclarer {
- Component _component;
-
- public BoltDeclarerImpl(Component component) {
- _component = component;
- }
-
- @Override
- public BoltDeclarer fieldsGrouping(final String component,
- final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.fieldsGrouping(component, fields);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer fieldsGrouping(final String component,
- final String streamId, final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.fieldsGrouping(component, streamId, fields);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer globalGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.globalGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer globalGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.globalGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer shuffleGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.shuffleGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer shuffleGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.shuffleGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localOrShuffleGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localOrShuffleGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localFirstGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localFirstGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer noneGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.noneGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer noneGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.noneGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer allGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.allGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer allGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.allGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer directGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.directGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer directGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.directGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(final String component,
- final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.customGrouping(component, grouping);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(final String component,
- final String streamId, final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.customGrouping(component, streamId, grouping);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer grouping(final GlobalStreamId stream,
- final Grouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.grouping(stream, grouping);
- }
-
- @Override
- public String getComponent() {
- return stream.get_componentId();
- }
- });
- return this;
- }
-
- private void addDeclaration(InputDeclaration declaration) {
- _component.declarations.add(declaration);
- }
-
- @Override
- public BoltDeclarer addConfigurations(Map conf) {
- _component.componentConfs.add(conf);
- return this;
- }
-
-
- }
-}
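
As the constructor comment notes, the id names the spout's state in ZooKeeper and must be unique among active topologies. A sketch of wiring a (deprecated) transactional topology with the builder methods defined above; the spout, bolts, and the "word" field are caller-supplied placeholders assumed for illustration:

import backtype.storm.coordination.IBatchBolt;
import backtype.storm.generated.StormTopology;
import backtype.storm.transactional.ITransactionalSpout;
import backtype.storm.transactional.TransactionalTopologyBuilder;
import backtype.storm.tuple.Fields;

// Illustrative wiring only; the id "global-count" must be unique among active
// topologies, and the "word" field is assumed to be declared by the spout.
public class TransactionalTopologyExample {
    public static StormTopology build(ITransactionalSpout spout,
                                      IBatchBolt counter,
                                      IBatchBolt committer) {
        TransactionalTopologyBuilder builder =
                new TransactionalTopologyBuilder("global-count", "spout", spout, 2);
        builder.setBolt("count", counter, 4)
               .fieldsGrouping("spout", new Fields("word"));
        builder.setCommitterBolt("commit", committer, 1)
               .globalGrouping("count");
        return builder.buildTopology();
    }
}
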
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IOpaquePartitionedTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IOpaquePartitionedTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IOpaquePartitionedTransactionalSpout.java
deleted file mode 100644
index 65c0772..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IOpaquePartitionedTransactionalSpout.java
+++ /dev/null
@@ -1,46 +0,0 @@
-package backtype.storm.transactional.partitioned;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IComponent;
-import backtype.storm.transactional.TransactionAttempt;
-import java.util.Map;
-
-/**
- * This defines a transactional spout which does *not* necessarily replay the
- * same batch every time it emits a batch for a transaction id.
- */
-public interface IOpaquePartitionedTransactionalSpout<T> extends IComponent {
- public interface Coordinator {
- /**
-         * Returns true if it is OK to start a new transaction, false
-         * otherwise (this transaction will be skipped).
- *
- * You should sleep here if you want a delay between asking for the next
- * transaction (this will be called repeatedly in a loop).
- */
- boolean isReady();
-
- void close();
- }
-
- public interface Emitter<X> {
- /**
- * Emit a batch of tuples for a partition/transaction.
- *
- * Return the metadata describing this batch that will be used as
- * lastPartitionMeta for defining the parameters of the next batch.
- */
- X emitPartitionBatch(TransactionAttempt tx,
- BatchOutputCollector collector, int partition,
- X lastPartitionMeta);
-
- int numPartitions();
-
- void close();
- }
-
- Emitter<T> getEmitter(Map conf, TopologyContext context);
-
- Coordinator getCoordinator(Map conf, TopologyContext context);
-}
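
The contract above is that whatever emitPartitionBatch returns is handed back as lastPartitionMeta on the next transaction for that partition, so a replayed transaction may emit a different batch. A minimal implementation sketch against the methods declared above; the sequence data source and output fields are invented for illustration:

import java.util.Map;

import backtype.storm.coordination.BatchOutputCollector;
import backtype.storm.task.TopologyContext;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.transactional.TransactionAttempt;
import backtype.storm.transactional.partitioned.IOpaquePartitionedTransactionalSpout;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;

// Illustrative sketch: emits a fixed-size range of longs per partition and
// batch. The Long metadata is the next offset to read, handed back later as
// lastPartitionMeta.
public class SequenceOpaqueSpout implements IOpaquePartitionedTransactionalSpout<Long> {
    private static final int PARTITIONS = 4;
    private static final int BATCH_SIZE = 10;

    @Override
    public Emitter<Long> getEmitter(Map conf, TopologyContext context) {
        return new Emitter<Long>() {
            @Override
            public Long emitPartitionBatch(TransactionAttempt tx,
                    BatchOutputCollector collector, int partition, Long lastPartitionMeta) {
                long start = lastPartitionMeta == null ? 0L : lastPartitionMeta;
                for (long i = start; i < start + BATCH_SIZE; i++) {
                    collector.emit(new Values(tx, partition, i));
                }
                return start + BATCH_SIZE; // becomes lastPartitionMeta next time
            }

            @Override
            public int numPartitions() {
                return PARTITIONS;
            }

            @Override
            public void close() {
            }
        };
    }

    @Override
    public Coordinator getCoordinator(Map conf, TopologyContext context) {
        return new Coordinator() {
            @Override
            public boolean isReady() {
                return true; // always start the next transaction immediately
            }

            @Override
            public void close() {
            }
        };
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        declarer.declare(new Fields("txid", "partition", "value"));
    }

    @Override
    public Map<String, Object> getComponentConfiguration() {
        return null;
    }
}
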
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IPartitionedTransactionalSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IPartitionedTransactionalSpout.java b/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IPartitionedTransactionalSpout.java
deleted file mode 100644
index 31e4c41..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/IPartitionedTransactionalSpout.java
+++ /dev/null
@@ -1,60 +0,0 @@
-package backtype.storm.transactional.partitioned;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IComponent;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.coordination.BatchOutputCollector;
-import java.util.Map;
-
-/**
- * This interface defines a transactional spout that reads its tuples from a
- * partitioned set of brokers. It automates the storing of metadata for each
- * partition to ensure that the same batch is always emitted for the same
- * transaction id. The partition metadata is stored in Zookeeper.
- */
-public interface IPartitionedTransactionalSpout<T> extends IComponent {
- public interface Coordinator {
- /**
- * Return the number of partitions currently in the source of data. The
-         * idea is that if a new partition is added and a prior transaction
- * is replayed, it doesn't emit tuples for the new partition because it
- * knows how many partitions were in that transaction.
- */
- int numPartitions();
-
- /**
-         * Returns true if it is OK to start a new transaction, false
-         * otherwise (this transaction will be skipped).
- *
- * You should sleep here if you want a delay between asking for the next
- * transaction (this will be called repeatedly in a loop).
- */
- boolean isReady();
-
- void close();
- }
-
- public interface Emitter<X> {
- /**
- * Emit a batch of tuples for a partition/transaction that's never been
- * emitted before. Return the metadata that can be used to reconstruct
- * this partition/batch in the future.
- */
- X emitPartitionBatchNew(TransactionAttempt tx,
- BatchOutputCollector collector, int partition,
- X lastPartitionMeta);
-
- /**
- * Emit a batch of tuples for a partition/transaction that has been
- * emitted before, using the metadata created when it was first emitted.
- */
- void emitPartitionBatch(TransactionAttempt tx,
- BatchOutputCollector collector, int partition, X partitionMeta);
-
- void close();
- }
-
- Coordinator getCoordinator(Map conf, TopologyContext context);
-
- Emitter<T> getEmitter(Map conf, TopologyContext context);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/OpaquePartitionedTransactionalSpoutExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/OpaquePartitionedTransactionalSpoutExecutor.java b/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/OpaquePartitionedTransactionalSpoutExecutor.java
deleted file mode 100644
index 4bc877f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/OpaquePartitionedTransactionalSpoutExecutor.java
+++ /dev/null
@@ -1,153 +0,0 @@
-package backtype.storm.transactional.partitioned;
-
-import backtype.storm.Config;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.transactional.ICommitterTransactionalSpout;
-import backtype.storm.transactional.ITransactionalSpout;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.transactional.state.RotatingTransactionalState;
-import backtype.storm.transactional.state.TransactionalState;
-import java.math.BigInteger;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.TreeMap;
-
-public class OpaquePartitionedTransactionalSpoutExecutor implements
- ICommitterTransactionalSpout<Object> {
- IOpaquePartitionedTransactionalSpout _spout;
-
- public class Coordinator implements ITransactionalSpout.Coordinator<Object> {
- IOpaquePartitionedTransactionalSpout.Coordinator _coordinator;
-
- public Coordinator(Map conf, TopologyContext context) {
- _coordinator = _spout.getCoordinator(conf, context);
- }
-
- @Override
- public Object initializeTransaction(BigInteger txid, Object prevMetadata) {
- return null;
- }
-
- @Override
- public boolean isReady() {
- return _coordinator.isReady();
- }
-
- @Override
- public void close() {
- _coordinator.close();
- }
- }
-
- public class Emitter implements ICommitterTransactionalSpout.Emitter {
- IOpaquePartitionedTransactionalSpout.Emitter _emitter;
- TransactionalState _state;
- TreeMap<BigInteger, Map<Integer, Object>> _cachedMetas = new TreeMap<BigInteger, Map<Integer, Object>>();
- Map<Integer, RotatingTransactionalState> _partitionStates = new HashMap<Integer, RotatingTransactionalState>();
- int _index;
- int _numTasks;
-
- public Emitter(Map conf, TopologyContext context) {
- _emitter = _spout.getEmitter(conf, context);
- _index = context.getThisTaskIndex();
- _numTasks = context.getComponentTasks(context.getThisComponentId())
- .size();
- _state = TransactionalState.newUserState(conf,
- (String) conf.get(Config.TOPOLOGY_TRANSACTIONAL_ID),
- getComponentConfiguration());
- List<String> existingPartitions = _state.list("");
- for (String p : existingPartitions) {
- int partition = Integer.parseInt(p);
- if ((partition - _index) % _numTasks == 0) {
- _partitionStates.put(partition,
- new RotatingTransactionalState(_state, p));
- }
- }
- }
-
- @Override
- public void emitBatch(TransactionAttempt tx, Object coordinatorMeta,
- BatchOutputCollector collector) {
- Map<Integer, Object> metas = new HashMap<Integer, Object>();
- _cachedMetas.put(tx.getTransactionId(), metas);
- int partitions = _emitter.numPartitions();
- Entry<BigInteger, Map<Integer, Object>> entry = _cachedMetas
- .lowerEntry(tx.getTransactionId());
- Map<Integer, Object> prevCached;
- if (entry != null) {
- prevCached = entry.getValue();
- } else {
- prevCached = new HashMap<Integer, Object>();
- }
-
- for (int i = _index; i < partitions; i += _numTasks) {
- RotatingTransactionalState state = _partitionStates.get(i);
- if (state == null) {
- state = new RotatingTransactionalState(_state, "" + i);
- _partitionStates.put(i, state);
- }
- state.removeState(tx.getTransactionId());
- Object lastMeta = prevCached.get(i);
- if (lastMeta == null)
- lastMeta = state.getLastState();
- Object meta = _emitter.emitPartitionBatch(tx, collector, i,
- lastMeta);
- metas.put(i, meta);
- }
- }
-
- @Override
- public void cleanupBefore(BigInteger txid) {
- for (RotatingTransactionalState state : _partitionStates.values()) {
- state.cleanupBefore(txid);
- }
- }
-
- @Override
- public void commit(TransactionAttempt attempt) {
- BigInteger txid = attempt.getTransactionId();
- Map<Integer, Object> metas = _cachedMetas.remove(txid);
- for (Integer partition : metas.keySet()) {
- Object meta = metas.get(partition);
- _partitionStates.get(partition).overrideState(txid, meta);
- }
- }
-
- @Override
- public void close() {
- _emitter.close();
- }
- }
-
- public OpaquePartitionedTransactionalSpoutExecutor(
- IOpaquePartitionedTransactionalSpout spout) {
- _spout = spout;
- }
-
- @Override
- public ITransactionalSpout.Coordinator<Object> getCoordinator(Map conf,
- TopologyContext context) {
- return new Coordinator(conf, context);
- }
-
- @Override
- public ICommitterTransactionalSpout.Emitter getEmitter(Map conf,
- TopologyContext context) {
- return new Emitter(conf, context);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _spout.declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/PartitionedTransactionalSpoutExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/PartitionedTransactionalSpoutExecutor.java b/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/PartitionedTransactionalSpoutExecutor.java
deleted file mode 100644
index 51bb34e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/partitioned/PartitionedTransactionalSpoutExecutor.java
+++ /dev/null
@@ -1,136 +0,0 @@
-package backtype.storm.transactional.partitioned;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.transactional.ITransactionalSpout;
-import backtype.storm.transactional.TransactionAttempt;
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.transactional.state.RotatingTransactionalState;
-import backtype.storm.transactional.state.TransactionalState;
-import java.math.BigInteger;
-import java.util.HashMap;
-import java.util.Map;
-
-public class PartitionedTransactionalSpoutExecutor implements
- ITransactionalSpout<Integer> {
- IPartitionedTransactionalSpout _spout;
-
- public PartitionedTransactionalSpoutExecutor(
- IPartitionedTransactionalSpout spout) {
- _spout = spout;
- }
-
- public IPartitionedTransactionalSpout getPartitionedSpout() {
- return _spout;
- }
-
- class Coordinator implements ITransactionalSpout.Coordinator<Integer> {
- private IPartitionedTransactionalSpout.Coordinator _coordinator;
-
- public Coordinator(Map conf, TopologyContext context) {
- _coordinator = _spout.getCoordinator(conf, context);
- }
-
- @Override
- public Integer initializeTransaction(BigInteger txid,
- Integer prevMetadata) {
- return _coordinator.numPartitions();
- }
-
- @Override
- public boolean isReady() {
- return _coordinator.isReady();
- }
-
- @Override
- public void close() {
- _coordinator.close();
- }
- }
-
- class Emitter implements ITransactionalSpout.Emitter<Integer> {
- private IPartitionedTransactionalSpout.Emitter _emitter;
- private TransactionalState _state;
- private Map<Integer, RotatingTransactionalState> _partitionStates = new HashMap<Integer, RotatingTransactionalState>();
- private int _index;
- private int _numTasks;
-
- public Emitter(Map conf, TopologyContext context) {
- _emitter = _spout.getEmitter(conf, context);
- _state = TransactionalState.newUserState(conf,
- (String) conf.get(Config.TOPOLOGY_TRANSACTIONAL_ID),
- getComponentConfiguration());
- _index = context.getThisTaskIndex();
- _numTasks = context.getComponentTasks(context.getThisComponentId())
- .size();
- }
-
- @Override
- public void emitBatch(final TransactionAttempt tx,
- final Integer partitions, final BatchOutputCollector collector) {
- for (int i = _index; i < partitions; i += _numTasks) {
- if (!_partitionStates.containsKey(i)) {
- _partitionStates.put(i, new RotatingTransactionalState(
- _state, "" + i));
- }
- RotatingTransactionalState state = _partitionStates.get(i);
- final int partition = i;
- Object meta = state.getStateOrCreate(tx.getTransactionId(),
- new RotatingTransactionalState.StateInitializer() {
- @Override
- public Object init(BigInteger txid, Object lastState) {
- return _emitter.emitPartitionBatchNew(tx,
- collector, partition, lastState);
- }
- });
- // it's null if one of:
- // a) a later transaction batch was emitted before this, so we
- // should skip this batch
-            // b) it didn't exist and was created (in which case the
-            // StateInitializer was invoked and the batch was already emitted)
- if (meta != null) {
- _emitter.emitPartitionBatch(tx, collector, partition, meta);
- }
- }
-
- }
-
- @Override
- public void cleanupBefore(BigInteger txid) {
- for (RotatingTransactionalState state : _partitionStates.values()) {
- state.cleanupBefore(txid);
- }
- }
-
- @Override
- public void close() {
- _state.close();
- _emitter.close();
- }
- }
-
- @Override
- public ITransactionalSpout.Coordinator getCoordinator(Map conf,
- TopologyContext context) {
- return new Coordinator(conf, context);
- }
-
- @Override
- public ITransactionalSpout.Emitter getEmitter(Map conf,
- TopologyContext context) {
- return new Emitter(conf, context);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _spout.declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/state/RotatingTransactionalState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/state/RotatingTransactionalState.java b/jstorm-client/src/main/java/backtype/storm/transactional/state/RotatingTransactionalState.java
deleted file mode 100644
index 2ee9f85..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/state/RotatingTransactionalState.java
+++ /dev/null
@@ -1,143 +0,0 @@
-package backtype.storm.transactional.state;
-
-import backtype.storm.transactional.TransactionalSpoutCoordinator;
-
-import java.math.BigInteger;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.SortedMap;
-import java.util.TreeMap;
-
-/**
- * A map from txid to a value. Automatically deletes txids that have been
- * committed.
- */
-public class RotatingTransactionalState {
- public static interface StateInitializer {
- Object init(BigInteger txid, Object lastState);
- }
-
- private TransactionalState _state;
- private String _subdir;
- private boolean _strictOrder;
-
- private TreeMap<BigInteger, Object> _curr = new TreeMap<BigInteger, Object>();
-
- public RotatingTransactionalState(TransactionalState state, String subdir,
- boolean strictOrder) {
- _state = state;
- _subdir = subdir;
- _strictOrder = strictOrder;
- state.mkdir(subdir);
- sync();
- }
-
- public RotatingTransactionalState(TransactionalState state, String subdir) {
- this(state, subdir, false);
- }
-
- public Object getLastState() {
- if (_curr.isEmpty())
- return null;
- else
- return _curr.lastEntry().getValue();
- }
-
- public void overrideState(BigInteger txid, Object state) {
- _state.setData(txPath(txid), state);
- _curr.put(txid, state);
- }
-
- public void removeState(BigInteger txid) {
- if (_curr.containsKey(txid)) {
- _curr.remove(txid);
- _state.delete(txPath(txid));
- }
- }
-
- public Object getState(BigInteger txid, StateInitializer init) {
- if (!_curr.containsKey(txid)) {
- SortedMap<BigInteger, Object> prevMap = _curr.headMap(txid);
- SortedMap<BigInteger, Object> afterMap = _curr.tailMap(txid);
-
- BigInteger prev = null;
- if (!prevMap.isEmpty())
- prev = prevMap.lastKey();
-
- if (_strictOrder) {
- if (prev == null
- && !txid.equals(TransactionalSpoutCoordinator.INIT_TXID)) {
- throw new IllegalStateException(
- "Trying to initialize transaction for which there should be a previous state");
- }
- if (prev != null && !prev.equals(txid.subtract(BigInteger.ONE))) {
- throw new IllegalStateException(
- "Expecting previous txid state to be the previous transaction");
- }
- if (!afterMap.isEmpty()) {
- throw new IllegalStateException(
- "Expecting tx state to be initialized in strict order but there are txids after that have state");
- }
- }
-
- Object data;
- if (afterMap.isEmpty()) {
- Object prevData;
- if (prev != null) {
- prevData = _curr.get(prev);
- } else {
- prevData = null;
- }
- data = init.init(txid, prevData);
- } else {
- data = null;
- }
- _curr.put(txid, data);
- _state.setData(txPath(txid), data);
- }
- return _curr.get(txid);
- }
-
- public boolean hasCache(BigInteger txid) {
- return _curr.containsKey(txid);
- }
-
- /**
- * Returns null if it was created, the value otherwise.
- */
- public Object getStateOrCreate(BigInteger txid, StateInitializer init) {
- if (_curr.containsKey(txid)) {
- return _curr.get(txid);
- } else {
- getState(txid, init);
- return null;
- }
- }
-
- public void cleanupBefore(BigInteger txid) {
- Set<BigInteger> toDelete = new HashSet<BigInteger>();
- toDelete.addAll(_curr.headMap(txid).keySet());
- for (BigInteger tx : toDelete) {
- _curr.remove(tx);
- _state.delete(txPath(tx));
- }
- }
-
- private void sync() {
- List<String> txids = _state.list(_subdir);
- for (String txid_s : txids) {
- Object data = _state.getData(txPath(txid_s));
- _curr.put(new BigInteger(txid_s), data);
- }
- }
-
- private String txPath(BigInteger tx) {
- return txPath(tx.toString());
- }
-
- private String txPath(String tx) {
- return _subdir + "/" + tx;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/transactional/state/TransactionalState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/transactional/state/TransactionalState.java b/jstorm-client/src/main/java/backtype/storm/transactional/state/TransactionalState.java
deleted file mode 100644
index 11b8359..0000000
--- a/jstorm-client/src/main/java/backtype/storm/transactional/state/TransactionalState.java
+++ /dev/null
@@ -1,132 +0,0 @@
-package backtype.storm.transactional.state;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.curator.framework.CuratorFramework;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.KeeperException;
-
-import backtype.storm.Config;
-import backtype.storm.serialization.KryoValuesDeserializer;
-import backtype.storm.serialization.KryoValuesSerializer;
-import backtype.storm.utils.Utils;
-
-public class TransactionalState {
- CuratorFramework _curator;
- KryoValuesSerializer _ser;
- KryoValuesDeserializer _des;
-
- public static TransactionalState newUserState(Map conf, String id,
- Map componentConf) {
- return new TransactionalState(conf, id, componentConf, "user");
- }
-
- public static TransactionalState newCoordinatorState(Map conf, String id,
- Map componentConf) {
- return new TransactionalState(conf, id, componentConf, "coordinator");
- }
-
- protected TransactionalState(Map conf, String id, Map componentConf,
- String subroot) {
- try {
- conf = new HashMap(conf);
- // ensure that the serialization registrations are consistent with
- // the declarations in this spout
- if (componentConf != null) {
- conf.put(Config.TOPOLOGY_KRYO_REGISTER,
- componentConf.get(Config.TOPOLOGY_KRYO_REGISTER));
- }
- String rootDir = conf.get(Config.TRANSACTIONAL_ZOOKEEPER_ROOT)
- + "/" + id + "/" + subroot;
- List<String> servers = (List<String>) getWithBackup(conf,
- Config.TRANSACTIONAL_ZOOKEEPER_SERVERS,
- Config.STORM_ZOOKEEPER_SERVERS);
- Object port = getWithBackup(conf,
- Config.TRANSACTIONAL_ZOOKEEPER_PORT,
- Config.STORM_ZOOKEEPER_PORT);
- CuratorFramework initter = Utils.newCuratorStarted(conf, servers,
- port);
- try {
- initter.create().creatingParentsIfNeeded().forPath(rootDir);
- } catch (KeeperException.NodeExistsException e) {
-
- }
-
- initter.close();
-
- _curator = Utils.newCuratorStarted(conf, servers, port, rootDir);
- _ser = new KryoValuesSerializer(conf);
- _des = new KryoValuesDeserializer(conf);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void setData(String path, Object obj) {
- path = "/" + path;
- byte[] ser = _ser.serializeObject(obj);
- try {
- if (_curator.checkExists().forPath(path) != null) {
- _curator.setData().forPath(path, ser);
- } else {
- _curator.create().creatingParentsIfNeeded()
- .withMode(CreateMode.PERSISTENT).forPath(path, ser);
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void delete(String path) {
- path = "/" + path;
- try {
- _curator.delete().forPath(path);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public List<String> list(String path) {
- path = "/" + path;
- try {
- if (_curator.checkExists().forPath(path) == null) {
- return new ArrayList<String>();
- } else {
- return _curator.getChildren().forPath(path);
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void mkdir(String path) {
- setData(path, 7);
- }
-
- public Object getData(String path) {
- path = "/" + path;
- try {
- if (_curator.checkExists().forPath(path) != null) {
- return _des.deserializeObject(_curator.getData().forPath(path));
- } else {
- return null;
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- public void close() {
- _curator.close();
- }
-
- private Object getWithBackup(Map amap, Object primary, Object backup) {
- Object ret = amap.get(primary);
- if (ret == null)
- return amap.get(backup);
- return ret;
- }
-}
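
A minimal sketch of how the TransactionalState API removed above is typically used, assuming a reachable ZooKeeper ensemble configured via storm.yaml; the topology id, path, and stored value are illustrative, not part of this commit:

    import java.util.Map;

    import backtype.storm.Config;
    import backtype.storm.transactional.state.TransactionalState;
    import backtype.storm.utils.Utils;

    public class TransactionalStateSketch {
        public static void main(String[] args) {
            // Assumes storm.yaml points at a running ZooKeeper ensemble
            // (storm.zookeeper.servers / storm.zookeeper.port).
            Map conf = Utils.readStormConfig();
            conf.put(Config.TRANSACTIONAL_ZOOKEEPER_ROOT, "/transactional");

            TransactionalState state =
                    TransactionalState.newUserState(conf, "my-tx-topology", null);
            try {
                state.setData("counts/partition-0", 42);       // Kryo-serialized
                Object value = state.getData("counts/partition-0");
                System.out.println("stored value: " + value);  // 42
                System.out.println("children: " + state.list("counts"));
            } finally {
                state.close();
            }
        }
    }
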
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/Fields.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/Fields.java b/jstorm-client/src/main/java/backtype/storm/tuple/Fields.java
deleted file mode 100644
index dc9b8bf..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/Fields.java
+++ /dev/null
@@ -1,82 +0,0 @@
-package backtype.storm.tuple;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.io.Serializable;
-
-public class Fields implements Iterable<String>, Serializable {
- private List<String> _fields;
- private Map<String, Integer> _index = new HashMap<String, Integer>();
-
- public Fields(String... fields) {
- this(Arrays.asList(fields));
- }
-
- public Fields(List<String> fields) {
- _fields = new ArrayList<String>(fields.size());
- for (String field : fields) {
- if (_fields.contains(field))
- throw new IllegalArgumentException(String.format(
- "duplicate field '%s'", field));
- _fields.add(field);
- }
- index();
- }
-
- public List<Object> select(Fields selector, List<Object> tuple) {
- List<Object> ret = new ArrayList<Object>(selector.size());
- for (String s : selector) {
- ret.add(tuple.get(_index.get(s)));
- }
- return ret;
- }
-
- public List<String> toList() {
- return new ArrayList<String>(_fields);
- }
-
- public int size() {
- return _fields.size();
- }
-
- public String get(int index) {
- return _fields.get(index);
- }
-
- public Iterator<String> iterator() {
- return _fields.iterator();
- }
-
- /**
- * Returns the position of the specified field.
- */
- public int fieldIndex(String field) {
- Integer ret = _index.get(field);
- if (ret == null) {
- throw new IllegalArgumentException(field + " does not exist");
- }
- return ret;
- }
-
- /**
- * Returns true if this contains the specified field name.
- */
- public boolean contains(String field) {
- return _index.containsKey(field);
- }
-
- private void index() {
- for (int i = 0; i < _fields.size(); i++) {
- _index.put(_fields.get(i), i);
- }
- }
-
- @Override
- public String toString() {
- return _fields.toString();
- }
-}
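
The Fields class removed above is a small schema helper; a self-contained sketch of its lookup and projection methods (the class name and field names below are illustrative):

    import java.util.Arrays;
    import java.util.List;

    import backtype.storm.tuple.Fields;

    public class FieldsSketch {
        public static void main(String[] args) {
            Fields schema = new Fields("word", "count", "ts");

            // Positional lookups are backed by the internal name -> index map.
            System.out.println(schema.fieldIndex("count")); // 1
            System.out.println(schema.contains("missing")); // false

            // select() projects a tuple's values onto a subset of the fields.
            List<Object> tuple = Arrays.<Object>asList("storm", 7, 123456789L);
            System.out.println(schema.select(new Fields("ts", "word"), tuple)); // [123456789, storm]
        }
    }
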
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/ITuple.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/ITuple.java b/jstorm-client/src/main/java/backtype/storm/tuple/ITuple.java
deleted file mode 100644
index b00279d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/ITuple.java
+++ /dev/null
@@ -1,119 +0,0 @@
-package backtype.storm.tuple;
-
-import java.util.List;
-
-public interface ITuple {
-
- /**
- * Returns the number of fields in this tuple.
- */
- public int size();
-
- /**
- * Returns true if this tuple contains the specified field name.
- */
- public boolean contains(String field);
-
- /**
- * Gets the names of the fields in this tuple.
- */
- public Fields getFields();
-
- /**
- * Returns the position of the specified field in this tuple.
- */
- public int fieldIndex(String field);
-
- /**
- * Returns a subset of the tuple based on the fields selector.
- */
- public List<Object> select(Fields selector);
-
- /**
- * Gets the field at position i in the tuple. Returns object since tuples are dynamically typed.
- */
- public Object getValue(int i);
-
- /**
- * Returns the String at position i in the tuple. If that field is not a String,
- * you will get a runtime error.
- */
- public String getString(int i);
-
- /**
- * Returns the Integer at position i in the tuple. If that field is not an Integer,
- * you will get a runtime error.
- */
- public Integer getInteger(int i);
-
- /**
- * Returns the Long at position i in the tuple. If that field is not a Long,
- * you will get a runtime error.
- */
- public Long getLong(int i);
-
- /**
- * Returns the Boolean at position i in the tuple. If that field is not a Boolean,
- * you will get a runtime error.
- */
- public Boolean getBoolean(int i);
-
- /**
- * Returns the Short at position i in the tuple. If that field is not a Short,
- * you will get a runtime error.
- */
- public Short getShort(int i);
-
- /**
- * Returns the Byte at position i in the tuple. If that field is not a Byte,
- * you will get a runtime error.
- */
- public Byte getByte(int i);
-
- /**
- * Returns the Double at position i in the tuple. If that field is not a Double,
- * you will get a runtime error.
- */
- public Double getDouble(int i);
-
- /**
- * Returns the Float at position i in the tuple. If that field is not a Float,
- * you will get a runtime error.
- */
- public Float getFloat(int i);
-
- /**
- * Returns the byte array at position i in the tuple. If that field is not a byte array,
- * you will get a runtime error.
- */
- public byte[] getBinary(int i);
-
-
- public Object getValueByField(String field);
-
- public String getStringByField(String field);
-
- public Integer getIntegerByField(String field);
-
- public Long getLongByField(String field);
-
- public Boolean getBooleanByField(String field);
-
- public Short getShortByField(String field);
-
- public Byte getByteByField(String field);
-
- public Double getDoubleByField(String field);
-
- public Float getFloatByField(String field);
-
- public byte[] getBinaryByField(String field);
-
- /**
- * Gets all the values in this tuple.
- */
- public List<Object> getValues();
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/MessageId.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/MessageId.java b/jstorm-client/src/main/java/backtype/storm/tuple/MessageId.java
deleted file mode 100644
index b1bd68a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/MessageId.java
+++ /dev/null
@@ -1,86 +0,0 @@
-package backtype.storm.tuple;
-
-import backtype.storm.utils.Utils;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Random;
-import java.util.Set;
-
-public class MessageId {
- private Map<Long, Long> _anchorsToIds;
-
- @Deprecated
- public static long generateId() {
- return Utils.secureRandomLong();
- }
-
- public static long generateId(Random rand) {
- return rand.nextLong();
- }
-
- public static MessageId makeUnanchored() {
- return makeId(new HashMap<Long, Long>());
- }
-
- public static MessageId makeId(Map<Long, Long> anchorsToIds) {
- return new MessageId(anchorsToIds);
- }
-
- public static MessageId makeRootId(long id, long val) {
- Map<Long, Long> anchorsToIds = new HashMap<Long, Long>();
- anchorsToIds.put(id, val);
- return new MessageId(anchorsToIds);
- }
-
- protected MessageId(Map<Long, Long> anchorsToIds) {
- _anchorsToIds = anchorsToIds;
- }
-
- public Map<Long, Long> getAnchorsToIds() {
- return _anchorsToIds;
- }
-
- public Set<Long> getAnchors() {
- return _anchorsToIds.keySet();
- }
-
- @Override
- public int hashCode() {
- return _anchorsToIds.hashCode();
- }
-
- @Override
- public boolean equals(Object other) {
- if (other instanceof MessageId) {
- return _anchorsToIds.equals(((MessageId) other)._anchorsToIds);
- } else {
- return false;
- }
- }
-
- @Override
- public String toString() {
- return _anchorsToIds.toString();
- }
-
- public void serialize(Output out) throws IOException {
- out.writeInt(_anchorsToIds.size(), true);
- for (Entry<Long, Long> anchorToId : _anchorsToIds.entrySet()) {
- out.writeLong(anchorToId.getKey());
- out.writeLong(anchorToId.getValue());
- }
- }
-
- public static MessageId deserialize(Input in) throws IOException {
- int numAnchors = in.readInt(true);
- Map<Long, Long> anchorsToIds = new HashMap<Long, Long>();
- for (int i = 0; i < numAnchors; i++) {
- anchorsToIds.put(in.readLong(), in.readLong());
- }
- return new MessageId(anchorsToIds);
- }
-}
\ No newline at end of file
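
For context on the MessageId class removed above, a short illustrative sketch of how a root anchor is created for the acking framework; the random values are placeholders:

    import java.util.Random;

    import backtype.storm.tuple.MessageId;

    public class MessageIdSketch {
        public static void main(String[] args) {
            Random rand = new Random();
            long rootId = MessageId.generateId(rand);

            // A root id anchors a spout tuple; downstream tuples carry the same
            // anchor key with fresh XOR values for the acker.
            MessageId id = MessageId.makeRootId(rootId, MessageId.generateId(rand));
            System.out.println(id.getAnchors()); // the single root anchor
        }
    }
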
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/Tuple.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/Tuple.java b/jstorm-client/src/main/java/backtype/storm/tuple/Tuple.java
deleted file mode 100644
index f170cd2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/Tuple.java
+++ /dev/null
@@ -1,43 +0,0 @@
-package backtype.storm.tuple;
-
-import backtype.storm.generated.GlobalStreamId;
-import java.util.List;
-
-/**
- * The tuple is the main data structure in Storm. A tuple is a named list of values,
- * where each value can be any type. Tuples are dynamically typed -- the types of the fields
- * do not need to be declared. Tuples have helper methods like getInteger and getString
- * to get field values without having to cast the result.
- *
- * Storm needs to know how to serialize all the values in a tuple. By default, Storm
- * knows how to serialize the primitive types, strings, and byte arrays. If you want to
- * use another type, you'll need to implement and register a serializer for that type.
- * See <a href="http://github.com/nathanmarz/storm/wiki/Serialization">the Serialization wiki page</a> for more info.
- */
-public interface Tuple extends ITuple{
-
- /**
- * Returns the global stream id (component + stream) of this tuple.
- */
- public GlobalStreamId getSourceGlobalStreamid();
-
- /**
- * Gets the id of the component that created this tuple.
- */
- public String getSourceComponent();
-
- /**
- * Gets the id of the task that created this tuple.
- */
- public int getSourceTask();
-
- /**
- * Gets the id of the stream that this tuple was emitted to.
- */
- public String getSourceStreamId();
-
- /**
- * Gets the message id associated with this tuple.
- */
- public MessageId getMessageId();
-}
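
A brief reading pattern for the Tuple interface removed above, as it would appear inside a bolt's execute(); the field names "word" and "count" are assumptions made for this sketch:

    import backtype.storm.tuple.Tuple;

    public class TupleReadSketch {
        // Untyped access plus the typed convenience getters; tuples arrive from
        // the framework, so this sketch only shows the read side.
        static void inspect(Tuple input) {
            String word = input.getStringByField("word");
            long count = input.getLongByField("count");
            System.out.println(input.getSourceComponent() + "/"
                    + input.getSourceStreamId() + " -> " + word + "=" + count);
        }
    }
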
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/TupleExt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/TupleExt.java b/jstorm-client/src/main/java/backtype/storm/tuple/TupleExt.java
deleted file mode 100644
index 7307342..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/TupleExt.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package backtype.storm.tuple;
-
-public interface TupleExt extends Tuple {
- /**
- * Get the target task id.
- *
- * @return the target task id
- */
- int getTargetTaskId();
-
- void setTargetTaskId(int targetTaskId);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/TupleImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/TupleImpl.java b/jstorm-client/src/main/java/backtype/storm/tuple/TupleImpl.java
deleted file mode 100644
index 2f47f6e..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/TupleImpl.java
+++ /dev/null
@@ -1,342 +0,0 @@
-package backtype.storm.tuple;
-
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.task.GeneralTopologyContext;
-import backtype.storm.utils.IndifferentAccessMap;
-import clojure.lang.ASeq;
-import clojure.lang.Counted;
-import clojure.lang.IMeta;
-import clojure.lang.IPersistentMap;
-import clojure.lang.ISeq;
-import clojure.lang.Indexed;
-import clojure.lang.Keyword;
-import clojure.lang.MapEntry;
-import clojure.lang.Obj;
-import clojure.lang.PersistentArrayMap;
-import clojure.lang.Seqable;
-import clojure.lang.Symbol;
-import java.util.List;
-
-public class TupleImpl extends IndifferentAccessMap implements Seqable,
- Indexed, IMeta, Tuple {
- private List<Object> values;
- private int taskId;
- private String streamId;
- private GeneralTopologyContext context;
- private MessageId id;
- private IPersistentMap _meta = null;
-
- public TupleImpl(GeneralTopologyContext context, List<Object> values,
- int taskId, String streamId, MessageId id) {
- this.values = values;
- this.taskId = taskId;
- this.streamId = streamId;
- this.id = id;
- this.context = context;
-
- String componentId = context.getComponentId(taskId);
- Fields schema = context.getComponentOutputFields(componentId, streamId);
- if (values.size() != schema.size()) {
- throw new IllegalArgumentException(
- "Tuple created with wrong number of fields. " + "Expected "
- + schema.size() + " fields but got "
- + values.size() + " fields");
- }
- }
-
- public TupleImpl(GeneralTopologyContext context, List<Object> values,
- int taskId, String streamId) {
- this(context, values, taskId, streamId, MessageId.makeUnanchored());
- }
-
- Long _processSampleStartTime = null;
- Long _executeSampleStartTime = null;
-
- public void setProcessSampleStartTime(long ms) {
- _processSampleStartTime = ms;
- }
-
- public Long getProcessSampleStartTime() {
- return _processSampleStartTime;
- }
-
- public void setExecuteSampleStartTime(long ms) {
- _executeSampleStartTime = ms;
- }
-
- public Long getExecuteSampleStartTime() {
- return _executeSampleStartTime;
- }
-
- long _outAckVal = 0;
-
- public void updateAckVal(long val) {
- _outAckVal = _outAckVal ^ val;
- }
-
- public long getAckVal() {
- return _outAckVal;
- }
-
- public int size() {
- return values.size();
- }
-
- public int fieldIndex(String field) {
- return getFields().fieldIndex(field);
- }
-
- public boolean contains(String field) {
- return getFields().contains(field);
- }
-
- public Object getValue(int i) {
- return values.get(i);
- }
-
- public String getString(int i) {
- return (String) values.get(i);
- }
-
- public Integer getInteger(int i) {
- return (Integer) values.get(i);
- }
-
- public Long getLong(int i) {
- return (Long) values.get(i);
- }
-
- public Boolean getBoolean(int i) {
- return (Boolean) values.get(i);
- }
-
- public Short getShort(int i) {
- return (Short) values.get(i);
- }
-
- public Byte getByte(int i) {
- return (Byte) values.get(i);
- }
-
- public Double getDouble(int i) {
- return (Double) values.get(i);
- }
-
- public Float getFloat(int i) {
- return (Float) values.get(i);
- }
-
- public byte[] getBinary(int i) {
- return (byte[]) values.get(i);
- }
-
- public Object getValueByField(String field) {
- return values.get(fieldIndex(field));
- }
-
- public String getStringByField(String field) {
- return (String) values.get(fieldIndex(field));
- }
-
- public Integer getIntegerByField(String field) {
- return (Integer) values.get(fieldIndex(field));
- }
-
- public Long getLongByField(String field) {
- return (Long) values.get(fieldIndex(field));
- }
-
- public Boolean getBooleanByField(String field) {
- return (Boolean) values.get(fieldIndex(field));
- }
-
- public Short getShortByField(String field) {
- return (Short) values.get(fieldIndex(field));
- }
-
- public Byte getByteByField(String field) {
- return (Byte) values.get(fieldIndex(field));
- }
-
- public Double getDoubleByField(String field) {
- return (Double) values.get(fieldIndex(field));
- }
-
- public Float getFloatByField(String field) {
- return (Float) values.get(fieldIndex(field));
- }
-
- public byte[] getBinaryByField(String field) {
- return (byte[]) values.get(fieldIndex(field));
- }
-
- public List<Object> getValues() {
- return values;
- }
-
- public Fields getFields() {
- return context.getComponentOutputFields(getSourceComponent(),
- getSourceStreamId());
- }
-
- public List<Object> select(Fields selector) {
- return getFields().select(selector, values);
- }
-
- public GlobalStreamId getSourceGlobalStreamid() {
- return new GlobalStreamId(getSourceComponent(), streamId);
- }
-
- public String getSourceComponent() {
- return context.getComponentId(taskId);
- }
-
- public int getSourceTask() {
- return taskId;
- }
-
- public String getSourceStreamId() {
- return streamId;
- }
-
- public MessageId getMessageId() {
- return id;
- }
-
- @Override
- public String toString() {
- return "source: " + getSourceComponent() + ":" + taskId + ", stream: "
- + streamId + ", id: " + id.toString() + ", "
- + values.toString();
- }
-
- @Override
- public boolean equals(Object other) {
- return this == other;
- }
-
- @Override
- public int hashCode() {
- return System.identityHashCode(this);
- }
-
- private final Keyword makeKeyword(String name) {
- return Keyword.intern(Symbol.create(name));
- }
-
- /* ILookup */
- @Override
- public Object valAt(Object o) {
- try {
- if (o instanceof Keyword) {
- return getValueByField(((Keyword) o).getName());
- } else if (o instanceof String) {
- return getValueByField((String) o);
- }
- } catch (IllegalArgumentException e) {
- }
- return null;
- }
-
- /* Seqable */
- public ISeq seq() {
- if (values.size() > 0) {
- return new Seq(getFields().toList(), values, 0);
- }
- return null;
- }
-
- static class Seq extends ASeq implements Counted {
- final List<String> fields;
- final List<Object> values;
- final int i;
-
- Seq(List<String> fields, List<Object> values, int i) {
- this.fields = fields;
- this.values = values;
- assert i >= 0;
- this.i = i;
- }
-
- public Seq(IPersistentMap meta, List<String> fields,
- List<Object> values, int i) {
- super(meta);
- this.fields = fields;
- this.values = values;
- assert i >= 0;
- this.i = i;
- }
-
- public Object first() {
- return new MapEntry(fields.get(i), values.get(i));
- }
-
- public ISeq next() {
- if (i + 1 < fields.size()) {
- return new Seq(fields, values, i + 1);
- }
- return null;
- }
-
- public int count() {
- assert fields.size() - i >= 0 : "index out of bounds";
- // i being the position in the fields of this seq, the remainder of
- // the seq is the size
- return fields.size() - i;
- }
-
- public Obj withMeta(IPersistentMap meta) {
- return new Seq(meta, fields, values, i);
- }
- }
-
- /* Indexed */
- public Object nth(int i) {
- if (i < values.size()) {
- return values.get(i);
- } else {
- return null;
- }
- }
-
- public Object nth(int i, Object notfound) {
- Object ret = nth(i);
- if (ret == null)
- ret = notfound;
- return ret;
- }
-
- /* Counted */
- public int count() {
- return values.size();
- }
-
- /* IMeta */
- public IPersistentMap meta() {
- if (_meta == null) {
- _meta = new PersistentArrayMap(new Object[] {
- makeKeyword("stream"), getSourceStreamId(),
- makeKeyword("component"), getSourceComponent(),
- makeKeyword("task"), getSourceTask() });
- }
- return _meta;
- }
-
- private PersistentArrayMap toMap() {
- Object array[] = new Object[values.size() * 2];
- List<String> fields = getFields().toList();
- for (int i = 0; i < values.size(); i++) {
- array[i * 2] = fields.get(i);
- array[(i * 2) + 1] = values.get(i);
- }
- return new PersistentArrayMap(array);
- }
-
- public IPersistentMap getMap() {
- if (_map == null) {
- setMap(toMap());
- }
- return _map;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/TupleImplExt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/TupleImplExt.java b/jstorm-client/src/main/java/backtype/storm/tuple/TupleImplExt.java
deleted file mode 100644
index 5d4b487..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/TupleImplExt.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package backtype.storm.tuple;
-
-import java.util.List;
-
-import backtype.storm.task.GeneralTopologyContext;
-
-public class TupleImplExt extends TupleImpl implements TupleExt {
-
- protected int targetTaskId;
-
- public TupleImplExt(GeneralTopologyContext context, List<Object> values,
- int taskId, String streamId) {
- super(context, values, taskId, streamId);
- }
-
- public TupleImplExt(GeneralTopologyContext context, List<Object> values,
- int taskId, String streamId, MessageId id) {
- super(context, values, taskId, streamId, id);
- }
-
- @Override
- public int getTargetTaskId() {
- return targetTaskId;
- }
-
- @Override
- public void setTargetTaskId(int targetTaskId) {
- this.targetTaskId = targetTaskId;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/tuple/Values.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/tuple/Values.java b/jstorm-client/src/main/java/backtype/storm/tuple/Values.java
deleted file mode 100644
index d374f67..0000000
--- a/jstorm-client/src/main/java/backtype/storm/tuple/Values.java
+++ /dev/null
@@ -1,20 +0,0 @@
-package backtype.storm.tuple;
-
-import java.util.ArrayList;
-
-/**
- * A convenience class for making tuple values using new Values("field1", 2, 3)
- * syntax.
- */
-public class Values extends ArrayList<Object> {
- public Values() {
-
- }
-
- public Values(Object... vals) {
- super(vals.length);
- for (Object o : vals) {
- add(o);
- }
- }
-}
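
Since Values is just an ArrayList<Object>, it pairs naturally with a Fields declaration of the same arity; a small illustrative sketch:

    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;

    public class ValuesSketch {
        public static void main(String[] args) {
            Fields schema = new Fields("word", "count");
            Values tuple = new Values("storm", 7);
            // Project the tuple onto a subset of the declared fields.
            System.out.println(schema.select(new Fields("word"), tuple)); // [storm]
        }
    }
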
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/BufferFileInputStream.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/BufferFileInputStream.java b/jstorm-client/src/main/java/backtype/storm/utils/BufferFileInputStream.java
deleted file mode 100644
index c3e1a20..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/BufferFileInputStream.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package backtype.storm.utils;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.util.Arrays;
-
-public class BufferFileInputStream {
- byte[] buffer;
- FileInputStream stream;
-
- public BufferFileInputStream(String file, int bufferSize)
- throws FileNotFoundException {
- stream = new FileInputStream(file);
- buffer = new byte[bufferSize];
- }
-
- public BufferFileInputStream(String file) throws FileNotFoundException {
- this(file, 15 * 1024);
- }
-
- public byte[] read() throws IOException {
- int length = stream.read(buffer);
- if (length == -1) {
- close();
- return new byte[0];
- } else if (length == buffer.length) {
- return buffer;
- } else {
- return Arrays.copyOf(buffer, length);
- }
- }
-
- public void close() throws IOException {
- stream.close();
- }
-}
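
The BufferFileInputStream removed above reads a file in fixed-size chunks and signals end-of-file with an empty array (closing itself at that point); a sketch of the intended read loop, with a placeholder file path:

    import java.io.IOException;

    import backtype.storm.utils.BufferFileInputStream;

    public class BufferReadSketch {
        public static void main(String[] args) throws IOException {
            BufferFileInputStream in = new BufferFileInputStream("/tmp/topology.jar");
            long total = 0;
            while (true) {
                byte[] chunk = in.read();   // 15 KB by default
                if (chunk.length == 0) {
                    break;                  // EOF; the stream is already closed
                }
                total += chunk.length;
            }
            System.out.println("read " + total + " bytes");
        }
    }
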
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/CRC32OutputStream.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/CRC32OutputStream.java b/jstorm-client/src/main/java/backtype/storm/utils/CRC32OutputStream.java
deleted file mode 100644
index 46265b0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/CRC32OutputStream.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package backtype.storm.utils;
-
-import java.io.IOException;
-import java.io.OutputStream;
-import java.util.zip.CRC32;
-
-public class CRC32OutputStream extends OutputStream {
- private CRC32 hasher;
-
- public CRC32OutputStream() {
- hasher = new CRC32();
- }
-
- public long getValue() {
- return hasher.getValue();
- }
-
- @Override
- public void write(int i) throws IOException {
- hasher.update(i);
- }
-
- @Override
- public void write(byte[] bytes, int start, int end) throws IOException {
- hasher.update(bytes, start, end);
- }
-}
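
CRC32OutputStream, removed above, discards the bytes written to it and only keeps a running checksum, which makes it cheap to checksum serialized output; a minimal sketch:

    import java.io.IOException;

    import backtype.storm.utils.CRC32OutputStream;

    public class ChecksumSketch {
        public static void main(String[] args) throws IOException {
            CRC32OutputStream out = new CRC32OutputStream();
            byte[] payload = "hello storm".getBytes("UTF-8");
            out.write(payload, 0, payload.length);
            System.out.println(Long.toHexString(out.getValue()));
        }
    }
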
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/ClojureTimerTask.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/ClojureTimerTask.java b/jstorm-client/src/main/java/backtype/storm/utils/ClojureTimerTask.java
deleted file mode 100644
index b9094e2..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/ClojureTimerTask.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package backtype.storm.utils;
-
-import clojure.lang.IFn;
-import java.util.TimerTask;
-
-public class ClojureTimerTask extends TimerTask {
- IFn _afn;
-
- public ClojureTimerTask(IFn afn) {
- super();
- _afn = afn;
- }
-
- @Override
- public void run() {
- _afn.run();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/Container.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/Container.java b/jstorm-client/src/main/java/backtype/storm/utils/Container.java
deleted file mode 100644
index b8a6f12..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/Container.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package backtype.storm.utils;
-
-import java.io.Serializable;
-
-public class Container implements Serializable {
- public Object object;
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/DRPCClient.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/DRPCClient.java b/jstorm-client/src/main/java/backtype/storm/utils/DRPCClient.java
deleted file mode 100644
index 975d7d8..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/DRPCClient.java
+++ /dev/null
@@ -1,70 +0,0 @@
-package backtype.storm.utils;
-
-import org.apache.thrift7.TException;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.transport.TFramedTransport;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-
-import backtype.storm.generated.DRPCExecutionException;
-import backtype.storm.generated.DistributedRPC;
-
-public class DRPCClient implements DistributedRPC.Iface {
- private TTransport conn;
- private DistributedRPC.Client client;
- private String host;
- private int port;
- private Integer timeout;
-
- public DRPCClient(String host, int port, Integer timeout) {
- try {
- this.host = host;
- this.port = port;
- this.timeout = timeout;
- connect();
- } catch (TException e) {
- throw new RuntimeException(e);
- }
- }
-
- public DRPCClient(String host, int port) {
- this(host, port, null);
- }
-
- private void connect() throws TException {
- TSocket socket = new TSocket(host, port);
- if (timeout != null) {
- socket.setTimeout(timeout);
- }
- conn = new TFramedTransport(socket);
- client = new DistributedRPC.Client(new TBinaryProtocol(conn));
- conn.open();
- }
-
- public String getHost() {
- return host;
- }
-
- public int getPort() {
- return port;
- }
-
- public String execute(String func, String args) throws TException,
- DRPCExecutionException {
- try {
- if (client == null)
- connect();
- return client.execute(func, args);
- } catch (TException e) {
- client = null;
- throw e;
- } catch (DRPCExecutionException e) {
- client = null;
- throw e;
- }
- }
-
- public void close() {
- conn.close();
- }
-}
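
An illustrative call through the DRPCClient removed above; the host, port, timeout, and DRPC function name are placeholders and assume a running DRPC server:

    import org.apache.thrift7.TException;

    import backtype.storm.generated.DRPCExecutionException;
    import backtype.storm.utils.DRPCClient;

    public class DrpcCallSketch {
        public static void main(String[] args) {
            DRPCClient client = new DRPCClient("drpc.example.com", 3772, 5000);
            try {
                // Blocks until the topology serving "reach" returns a result.
                String result = client.execute("reach", "http://example.com");
                System.out.println(result);
            } catch (TException | DRPCExecutionException e) {
                throw new RuntimeException(e);
            } finally {
                client.close();
            }
        }
    }
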
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueue.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueue.java b/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueue.java
deleted file mode 100644
index 8d9e861..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueue.java
+++ /dev/null
@@ -1,71 +0,0 @@
-package backtype.storm.utils;
-
-import backtype.storm.metric.api.IStatefulObject;
-
-import com.lmax.disruptor.EventHandler;
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.WaitStrategy;
-import com.lmax.disruptor.dsl.ProducerType;
-
-/**
- *
- * A single consumer queue that uses the LMAX Disruptor. The key to the
- * performance is the ability to catch up to the producer by processing tuples
- * in batches.
- */
-public abstract class DisruptorQueue implements IStatefulObject {
- public static void setUseSleep(boolean useSleep) {
- DisruptorQueueImpl.setUseSleep(useSleep);
- }
-
- private static boolean CAPACITY_LIMITED = false;
-
- public static void setLimited(boolean limited) {
- CAPACITY_LIMITED = limited;
- }
-
- public static DisruptorQueue mkInstance(String queueName,
- ProducerType producerType, int bufferSize, WaitStrategy wait) {
- if (CAPACITY_LIMITED == true) {
- return new DisruptorQueueImpl(queueName, producerType, bufferSize,
- wait);
- } else {
- return new DisruptorWrapBlockingQueue(queueName, producerType,
- bufferSize, wait);
- }
- }
-
- public abstract String getName();
-
-
-
- public abstract void haltWithInterrupt();
-
- public abstract Object poll();
-
- public abstract Object take();
-
- public abstract void consumeBatch(EventHandler<Object> handler);
-
- public abstract void consumeBatchWhenAvailable(EventHandler<Object> handler);
-
- public abstract void publish(Object obj);
-
- public abstract void publish(Object obj, boolean block)
- throws InsufficientCapacityException;
-
- public abstract void consumerStarted();
-
- public abstract void clear();
-
- public abstract long population();
-
- public abstract long capacity();
-
- public abstract long writePos();
-
- public abstract long readPos();
-
- public abstract float pctFull();
-
-}
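
A sketch of the DisruptorQueue factory removed above: setLimited(true) selects the ring-buffer implementation, otherwise the blocking-queue wrapper is returned. The queue name and payloads are illustrative:

    import com.lmax.disruptor.BlockingWaitStrategy;
    import com.lmax.disruptor.EventHandler;
    import com.lmax.disruptor.dsl.ProducerType;

    import backtype.storm.utils.DisruptorQueue;

    public class QueueSketch {
        public static void main(String[] args) {
            DisruptorQueue.setLimited(true);  // use the ring-buffer implementation
            DisruptorQueue queue = DisruptorQueue.mkInstance(
                    "demo", ProducerType.MULTI, 1024, new BlockingWaitStrategy());
            queue.consumerStarted();          // flush the pre-start cache path

            queue.publish("tuple-1");
            queue.publish("tuple-2");

            // Drain everything published so far in one batch.
            queue.consumeBatch(new EventHandler<Object>() {
                @Override
                public void onEvent(Object event, long sequence, boolean endOfBatch) {
                    System.out.println(sequence + ": " + event);
                }
            });
        }
    }
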
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueueImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueueImpl.java b/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueueImpl.java
deleted file mode 100644
index 0c334b5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/DisruptorQueueImpl.java
+++ /dev/null
@@ -1,298 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.concurrent.ConcurrentLinkedQueue;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.metric.api.IStatefulObject;
-import backtype.storm.utils.disruptor.AbstractSequencerExt;
-import backtype.storm.utils.disruptor.RingBuffer;
-
-import com.lmax.disruptor.AlertException;
-import com.lmax.disruptor.EventFactory;
-import com.lmax.disruptor.EventHandler;
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.Sequence;
-import com.lmax.disruptor.SequenceBarrier;
-import com.lmax.disruptor.TimeoutException;
-import com.lmax.disruptor.WaitStrategy;
-import com.lmax.disruptor.dsl.ProducerType;
-
-/**
- *
- * A single consumer queue that uses the LMAX Disruptor. The key to the
- * performance is the ability to catch up to the producer by processing tuples
- * in batches.
- */
-public class DisruptorQueueImpl extends DisruptorQueue {
- private static final Logger LOG = Logger.getLogger(DisruptorQueueImpl.class);
- static boolean useSleep = true;
- public static void setUseSleep(boolean useSleep) {
- AbstractSequencerExt.setWaitSleep(useSleep);
- }
-
- private static final Object FLUSH_CACHE = new Object();
- private static final Object INTERRUPT = new Object();
- private static final String PREFIX = "disruptor-";
-
- private final String _queueName;
- private final RingBuffer<MutableObject> _buffer;
- private final Sequence _consumer;
- private final SequenceBarrier _barrier;
-
- // TODO: consider having a threadlocal cache of this variable to speed up
- // reads?
- volatile boolean consumerStartedFlag = false;
-
- private final HashMap<String, Object> state = new HashMap<String, Object>(4);
- private final ConcurrentLinkedQueue<Object> _cache = new ConcurrentLinkedQueue<Object>();
- private final ReentrantReadWriteLock cacheLock = new ReentrantReadWriteLock();
- private final Lock readLock = cacheLock.readLock();
- private final Lock writeLock = cacheLock.writeLock();
-
- public DisruptorQueueImpl(String queueName, ProducerType producerType,
- int bufferSize, WaitStrategy wait) {
- this._queueName = PREFIX + queueName;
- _buffer = RingBuffer.create(producerType, new ObjectEventFactory(),
- bufferSize, wait);
- _consumer = new Sequence();
- _barrier = _buffer.newBarrier();
- _buffer.addGatingSequences(_consumer);
- if (producerType == ProducerType.SINGLE) {
- consumerStartedFlag = true;
- } else {
- // make sure we flush the pending messages in cache first
- if (bufferSize < 2) {
- throw new RuntimeException("QueueSize must >= 2");
- }
- try {
- publishDirect(FLUSH_CACHE, true);
- } catch (InsufficientCapacityException e) {
- throw new RuntimeException("This code should be unreachable!",
- e);
- }
- }
- }
-
- public String getName() {
- return _queueName;
- }
-
- public void consumeBatch(EventHandler<Object> handler) {
- consumeBatchToCursor(_barrier.getCursor(), handler);
- }
-
- public void haltWithInterrupt() {
- publish(INTERRUPT);
- }
-
- public Object poll() {
- // @@@
- // should use _cache.isEmpty, but it is slow
- // I will change the logic later
- if (consumerStartedFlag == false) {
- return _cache.poll();
- }
-
- final long nextSequence = _consumer.get() + 1;
- if (nextSequence <= _barrier.getCursor()) {
- MutableObject mo = _buffer.get(nextSequence);
- _consumer.set(nextSequence);
- Object ret = mo.o;
- mo.setObject(null);
- return ret;
- }
- return null;
- }
-
- public Object take() {
- // @@@
- // should use _cache.isEmpty, but it is slow
- // I will change the logic later
- if (consumerStartedFlag == false) {
- return _cache.poll();
- }
-
- final long nextSequence = _consumer.get() + 1;
- // final long availableSequence;
- try {
- _barrier.waitFor(nextSequence);
- } catch (AlertException e) {
- LOG.error(e.getCause(), e);
- throw new RuntimeException(e);
- } catch (InterruptedException e) {
- LOG.error("InterruptedException " + e.getCause());
- // throw new RuntimeException(e);
- return null;
- } catch (TimeoutException e) {
- LOG.error(e.getCause(), e);
- return null;
- }
- MutableObject mo = _buffer.get(nextSequence);
- _consumer.set(nextSequence);
- Object ret = mo.o;
- mo.setObject(null);
- return ret;
- }
-
- public void consumeBatchWhenAvailable(EventHandler<Object> handler) {
- try {
- final long nextSequence = _consumer.get() + 1;
- final long availableSequence = _barrier.waitFor(nextSequence);
- if (availableSequence >= nextSequence) {
- consumeBatchToCursor(availableSequence, handler);
- }
- } catch (AlertException e) {
- LOG.error(e.getCause(), e);
- throw new RuntimeException(e);
- } catch (InterruptedException e) {
- LOG.error("InterruptedException " + e.getCause());
- return;
- }catch (TimeoutException e) {
- LOG.error(e.getCause(), e);
- return ;
- }
- }
-
- public void consumeBatchToCursor(long cursor, EventHandler<Object> handler){
- for (long curr = _consumer.get() + 1; curr <= cursor; curr++) {
- try {
- MutableObject mo = _buffer.get(curr);
- Object o = mo.o;
- mo.setObject(null);
- if (o == FLUSH_CACHE) {
- Object c = null;
- while (true) {
- c = _cache.poll();
- if (c == null)
- break;
- else
- handler.onEvent(c, curr, true);
- }
- } else if (o == INTERRUPT) {
- throw new InterruptedException(
- "Disruptor processing interrupted");
- } else {
- handler.onEvent(o, curr, curr == cursor);
- }
- } catch (InterruptedException e) {
- // throw new RuntimeException(e);
- LOG.error(e.getCause());
- return;
- } catch (Exception e) {
- LOG.error(e.getCause(), e);
- throw new RuntimeException(e);
- }
- }
- // TODO: only set this if the consumer cursor has changed?
- _consumer.set(cursor);
- }
-
- /*
- * Caches until consumerStarted is called, upon which the cache is flushed
- * to the consumer
- */
- public void publish(Object obj) {
- try {
- publish(obj, true);
- } catch (InsufficientCapacityException ex) {
- throw new RuntimeException("This code should be unreachable!");
- }
- }
-
- public void tryPublish(Object obj) throws InsufficientCapacityException {
- publish(obj, false);
- }
-
- public void publish(Object obj, boolean block)
- throws InsufficientCapacityException {
-
- boolean publishNow = consumerStartedFlag;
-
- if (!publishNow) {
- readLock.lock();
- try {
- publishNow = consumerStartedFlag;
- if (!publishNow) {
- _cache.add(obj);
- }
- } finally {
- readLock.unlock();
- }
- }
-
- if (publishNow) {
- publishDirect(obj, block);
- }
- }
-
- protected void publishDirect(Object obj, boolean block)
- throws InsufficientCapacityException {
- final long id;
- if (block) {
- id = _buffer.next();
- } else {
- id = _buffer.tryNext(1);
- }
- final MutableObject m = _buffer.get(id);
- m.setObject(obj);
- _buffer.publish(id);
- }
-
- public void consumerStarted() {
-
- writeLock.lock();
- consumerStartedFlag = true;
-
- writeLock.unlock();
- }
-
- public void clear() {
- while (population() != 0L) {
- poll();
- }
- }
-
- public long population() {
- return (writePos() - readPos());
- }
-
- public long capacity() {
- return _buffer.getBufferSize();
- }
-
- public long writePos() {
- return _buffer.getCursor();
- }
-
- public long readPos() {
- return _consumer.get();
- }
-
- public float pctFull() {
- return (1.0F * population() / capacity());
- }
-
- @Override
- public Object getState() {
- // get readPos then writePos so it's never an under-estimate
- long rp = readPos();
- long wp = writePos();
- state.put("capacity", capacity());
- state.put("population", wp - rp);
- state.put("write_pos", wp);
- state.put("read_pos", rp);
- return state;
- }
-
- public static class ObjectEventFactory implements
- EventFactory<MutableObject> {
- @Override
- public MutableObject newInstance() {
- return new MutableObject();
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/DisruptorWrapBlockingQueue.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/DisruptorWrapBlockingQueue.java b/jstorm-client/src/main/java/backtype/storm/utils/DisruptorWrapBlockingQueue.java
deleted file mode 100644
index a701f39..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/DisruptorWrapBlockingQueue.java
+++ /dev/null
@@ -1,192 +0,0 @@
-package backtype.storm.utils;
-
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.LinkedBlockingDeque;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.log4j.Logger;
-
-import backtype.storm.metric.api.IStatefulObject;
-
-import com.lmax.disruptor.EventFactory;
-import com.lmax.disruptor.EventHandler;
-import com.lmax.disruptor.InsufficientCapacityException;
-import com.lmax.disruptor.WaitStrategy;
-import com.lmax.disruptor.dsl.ProducerType;
-
-/**
- *
- * A single consumer queue that uses the LMAX Disruptor. The key to the
- * performance is the ability to catch up to the producer by processing tuples
- * in batches.
- */
-public class DisruptorWrapBlockingQueue extends DisruptorQueue {
- private static final Logger LOG = Logger
- .getLogger(DisruptorWrapBlockingQueue.class);
-
- private static final long QUEUE_CAPACITY = 512;
- private LinkedBlockingDeque<Object> queue;
-
- private String queueName;
-
- public DisruptorWrapBlockingQueue(String queueName,
- ProducerType producerType, int bufferSize, WaitStrategy wait) {
- this.queueName = queueName;
- queue = new LinkedBlockingDeque<Object>();
- }
-
- public String getName() {
- return queueName;
- }
-
- // poll method
- public void consumeBatch(EventHandler<Object> handler) {
- consumeBatchToCursor(0, handler);
- }
-
- public void haltWithInterrupt() {
- }
-
- public Object poll() {
- return queue.poll();
- }
-
- public Object take() {
- try {
- return queue.take();
- } catch (InterruptedException e) {
- return null;
- }
- }
-
- public void drainQueue(Object object, EventHandler<Object> handler) {
- while (object != null) {
- try {
- handler.onEvent(object, 0, false);
- object = queue.poll();
- } catch (InterruptedException e) {
- LOG.warn("Occur interrupt error, " + object);
- break;
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
- }
-
- public void consumeBatchWhenAvailable(EventHandler<Object> handler) {
- Object object = queue.poll();
- if (object == null) {
- try {
- object = queue.take();
- } catch (InterruptedException e) {
- LOG.warn("Occur interrupt error, " + object);
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- drainQueue(object, handler);
-
- }
-
- public void consumeBatchToCursor(long cursor, EventHandler<Object> handler) {
- Object object = queue.poll();
- drainQueue(object, handler);
- }
-
- /*
- * Caches until consumerStarted is called, upon which the cache is flushed
- * to the consumer
- */
- public void publish(Object obj) {
- boolean isSuccess = queue.offer(obj);
- while (isSuccess == false) {
- try {
- Thread.sleep(1);
- } catch (InterruptedException e) {
- }
- isSuccess = queue.offer(obj);
- }
-
- }
-
- public void tryPublish(Object obj) throws InsufficientCapacityException {
- boolean isSuccess = queue.offer(obj);
- if (isSuccess == false) {
- throw InsufficientCapacityException.INSTANCE;
- }
-
- }
-
- public void publish(Object obj, boolean block)
- throws InsufficientCapacityException {
- if (block == true) {
- publish(obj);
- } else {
- tryPublish(obj);
- }
- }
-
- public void consumerStarted() {
- }
-
- private void flushCache() {
- }
-
- public void clear() {
- queue.clear();
- }
-
- public long population() {
- return queue.size();
- }
-
- public long capacity() {
- long used = queue.size();
- if (used < QUEUE_CAPACITY) {
- return QUEUE_CAPACITY;
- } else {
- return used;
- }
- }
-
- public long writePos() {
- return 0;
- }
-
- public long readPos() {
- return queue.size();
- }
-
- public float pctFull() {
- long used = queue.size();
- if (used < QUEUE_CAPACITY) {
- return (1.0F * used / QUEUE_CAPACITY);
- } else {
- return 1.0f;
- }
- }
-
- @Override
- public Object getState() {
- Map state = new HashMap<String, Object>();
- // get readPos then writePos so it's never an under-estimate
- long rp = readPos();
- long wp = writePos();
- state.put("capacity", capacity());
- state.put("population", wp - rp);
- state.put("write_pos", wp);
- state.put("read_pos", rp);
- return state;
- }
-
- public static class ObjectEventFactory implements
- EventFactory<MutableObject> {
- @Override
- public MutableObject newInstance() {
- return new MutableObject();
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/utils/IndifferentAccessMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/utils/IndifferentAccessMap.java b/jstorm-client/src/main/java/backtype/storm/utils/IndifferentAccessMap.java
deleted file mode 100644
index 74c4a63..0000000
--- a/jstorm-client/src/main/java/backtype/storm/utils/IndifferentAccessMap.java
+++ /dev/null
@@ -1,169 +0,0 @@
-package backtype.storm.utils;
-
-import clojure.lang.ILookup;
-import clojure.lang.ISeq;
-import clojure.lang.AFn;
-import clojure.lang.IPersistentMap;
-import clojure.lang.PersistentArrayMap;
-import clojure.lang.IMapEntry;
-import clojure.lang.IPersistentCollection;
-import clojure.lang.Keyword;
-import java.util.Iterator;
-import java.util.Map;
-import java.util.Collection;
-import java.util.Set;
-
-public class IndifferentAccessMap extends AFn implements ILookup,
- IPersistentMap, Map {
-
- protected IPersistentMap _map;
-
- protected IndifferentAccessMap() {
- }
-
- public IndifferentAccessMap(IPersistentMap map) {
- setMap(map);
- }
-
- public IPersistentMap getMap() {
- return _map;
- }
-
- public IPersistentMap setMap(IPersistentMap map) {
- _map = map;
- return _map;
- }
-
- public int size() {
- return ((Map) getMap()).size();
- }
-
- public int count() {
- return size();
- }
-
- public ISeq seq() {
- return getMap().seq();
- }
-
- @Override
- public Object valAt(Object o) {
- if (o instanceof Keyword) {
- return valAt(((Keyword) o).getName());
- }
- return getMap().valAt(o);
- }
-
- @Override
- public Object valAt(Object o, Object def) {
- Object ret = valAt(o);
- if (ret == null)
- ret = def;
- return ret;
- }
-
- /* IFn */
- @Override
- public Object invoke(Object o) {
- return valAt(o);
- }
-
- @Override
- public Object invoke(Object o, Object notfound) {
- return valAt(o, notfound);
- }
-
- /* IPersistentMap */
- /* Naive implementation, but it might be good enough */
- public IPersistentMap assoc(Object k, Object v) {
- if (k instanceof Keyword)
- return assoc(((Keyword) k).getName(), v);
-
- return new IndifferentAccessMap(getMap().assoc(k, v));
- }
-
- public IPersistentMap assocEx(Object k, Object v) {
- if (k instanceof Keyword)
- return assocEx(((Keyword) k).getName(), v);
-
- return new IndifferentAccessMap(getMap().assocEx(k, v));
- }
-
- public IPersistentMap without(Object k) {
- if (k instanceof Keyword)
- return without(((Keyword) k).getName());
-
- return new IndifferentAccessMap(getMap().without(k));
- }
-
- public boolean containsKey(Object k) {
- if (k instanceof Keyword)
- return containsKey(((Keyword) k).getName());
- return getMap().containsKey(k);
- }
-
- public IMapEntry entryAt(Object k) {
- if (k instanceof Keyword)
- return entryAt(((Keyword) k).getName());
-
- return getMap().entryAt(k);
- }
-
- public IPersistentCollection cons(Object o) {
- return getMap().cons(o);
- }
-
- public IPersistentCollection empty() {
- return new IndifferentAccessMap(PersistentArrayMap.EMPTY);
- }
-
- public boolean equiv(Object o) {
- return getMap().equiv(o);
- }
-
- public Iterator iterator() {
- return getMap().iterator();
- }
-
- /* Map */
- public boolean containsValue(Object v) {
- return ((Map) getMap()).containsValue(v);
- }
-
- public Set entrySet() {
- return ((Map) getMap()).entrySet();
- }
-
- public Object get(Object k) {
- return valAt(k);
- }
-
- public boolean isEmpty() {
- return ((Map) getMap()).isEmpty();
- }
-
- public Set keySet() {
- return ((Map) getMap()).keySet();
- }
-
- public Collection values() {
- return ((Map) getMap()).values();
- }
-
- /* Not implemented */
- public void clear() {
- throw new UnsupportedOperationException();
- }
-
- public Object put(Object k, Object v) {
- throw new UnsupportedOperationException();
- }
-
- public void putAll(Map m) {
- throw new UnsupportedOperationException();
- }
-
- public Object remove(Object k) {
- throw new UnsupportedOperationException();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TopologySummary.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TopologySummary.java b/jstorm-client/src/main/java/backtype/storm/generated/TopologySummary.java
deleted file mode 100644
index e2ee708..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TopologySummary.java
+++ /dev/null
@@ -1,900 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TopologySummary implements org.apache.thrift7.TBase<TopologySummary, TopologySummary._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TopologySummary");
-
- private static final org.apache.thrift7.protocol.TField ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("id", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("name", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField STATUS_FIELD_DESC = new org.apache.thrift7.protocol.TField("status", org.apache.thrift7.protocol.TType.STRING, (short)3);
- private static final org.apache.thrift7.protocol.TField UPTIME_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("uptime_secs", org.apache.thrift7.protocol.TType.I32, (short)4);
- private static final org.apache.thrift7.protocol.TField NUM_TASKS_FIELD_DESC = new org.apache.thrift7.protocol.TField("num_tasks", org.apache.thrift7.protocol.TType.I32, (short)5);
- private static final org.apache.thrift7.protocol.TField NUM_WORKERS_FIELD_DESC = new org.apache.thrift7.protocol.TField("num_workers", org.apache.thrift7.protocol.TType.I32, (short)6);
- private static final org.apache.thrift7.protocol.TField ERROR_INFO_FIELD_DESC = new org.apache.thrift7.protocol.TField("error_info", org.apache.thrift7.protocol.TType.STRING, (short)7);
-
- private String id; // required
- private String name; // required
- private String status; // required
- private int uptime_secs; // required
- private int num_tasks; // required
- private int num_workers; // required
- private String error_info; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- ID((short)1, "id"),
- NAME((short)2, "name"),
- STATUS((short)3, "status"),
- UPTIME_SECS((short)4, "uptime_secs"),
- NUM_TASKS((short)5, "num_tasks"),
- NUM_WORKERS((short)6, "num_workers"),
- ERROR_INFO((short)7, "error_info");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // ID
- return ID;
- case 2: // NAME
- return NAME;
- case 3: // STATUS
- return STATUS;
- case 4: // UPTIME_SECS
- return UPTIME_SECS;
- case 5: // NUM_TASKS
- return NUM_TASKS;
- case 6: // NUM_WORKERS
- return NUM_WORKERS;
- case 7: // ERROR_INFO
- return ERROR_INFO;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __UPTIME_SECS_ISSET_ID = 0;
- private static final int __NUM_TASKS_ISSET_ID = 1;
- private static final int __NUM_WORKERS_ISSET_ID = 2;
- private BitSet __isset_bit_vector = new BitSet(3);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.ID, new org.apache.thrift7.meta_data.FieldMetaData("id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.NAME, new org.apache.thrift7.meta_data.FieldMetaData("name", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.STATUS, new org.apache.thrift7.meta_data.FieldMetaData("status", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.UPTIME_SECS, new org.apache.thrift7.meta_data.FieldMetaData("uptime_secs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.NUM_TASKS, new org.apache.thrift7.meta_data.FieldMetaData("num_tasks", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.NUM_WORKERS, new org.apache.thrift7.meta_data.FieldMetaData("num_workers", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.ERROR_INFO, new org.apache.thrift7.meta_data.FieldMetaData("error_info", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TopologySummary.class, metaDataMap);
- }
-
- public TopologySummary() {
- }
-
- public TopologySummary(
- String id,
- String name,
- String status,
- int uptime_secs,
- int num_tasks,
- int num_workers,
- String error_info)
- {
- this();
- this.id = id;
- this.name = name;
- this.status = status;
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- this.num_tasks = num_tasks;
- set_num_tasks_isSet(true);
- this.num_workers = num_workers;
- set_num_workers_isSet(true);
- this.error_info = error_info;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TopologySummary(TopologySummary other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_id()) {
- this.id = other.id;
- }
- if (other.is_set_name()) {
- this.name = other.name;
- }
- if (other.is_set_status()) {
- this.status = other.status;
- }
- this.uptime_secs = other.uptime_secs;
- this.num_tasks = other.num_tasks;
- this.num_workers = other.num_workers;
- if (other.is_set_error_info()) {
- this.error_info = other.error_info;
- }
- }
-
- public TopologySummary deepCopy() {
- return new TopologySummary(this);
- }
-
- @Override
- public void clear() {
- this.id = null;
- this.name = null;
- this.status = null;
- set_uptime_secs_isSet(false);
- this.uptime_secs = 0;
- set_num_tasks_isSet(false);
- this.num_tasks = 0;
- set_num_workers_isSet(false);
- this.num_workers = 0;
- this.error_info = null;
- }
-
- public String get_id() {
- return this.id;
- }
-
- public void set_id(String id) {
- this.id = id;
- }
-
- public void unset_id() {
- this.id = null;
- }
-
- /** Returns true if field id is set (has been assigned a value) and false otherwise */
- public boolean is_set_id() {
- return this.id != null;
- }
-
- public void set_id_isSet(boolean value) {
- if (!value) {
- this.id = null;
- }
- }
-
- public String get_name() {
- return this.name;
- }
-
- public void set_name(String name) {
- this.name = name;
- }
-
- public void unset_name() {
- this.name = null;
- }
-
- /** Returns true if field name is set (has been assigned a value) and false otherwise */
- public boolean is_set_name() {
- return this.name != null;
- }
-
- public void set_name_isSet(boolean value) {
- if (!value) {
- this.name = null;
- }
- }
-
- public String get_status() {
- return this.status;
- }
-
- public void set_status(String status) {
- this.status = status;
- }
-
- public void unset_status() {
- this.status = null;
- }
-
- /** Returns true if field status is set (has been assigned a value) and false otherwise */
- public boolean is_set_status() {
- return this.status != null;
- }
-
- public void set_status_isSet(boolean value) {
- if (!value) {
- this.status = null;
- }
- }
-
- public int get_uptime_secs() {
- return this.uptime_secs;
- }
-
- public void set_uptime_secs(int uptime_secs) {
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- }
-
- public void unset_uptime_secs() {
- __isset_bit_vector.clear(__UPTIME_SECS_ISSET_ID);
- }
-
- /** Returns true if field uptime_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_uptime_secs() {
- return __isset_bit_vector.get(__UPTIME_SECS_ISSET_ID);
- }
-
- public void set_uptime_secs_isSet(boolean value) {
- __isset_bit_vector.set(__UPTIME_SECS_ISSET_ID, value);
- }
-
- public int get_num_tasks() {
- return this.num_tasks;
- }
-
- public void set_num_tasks(int num_tasks) {
- this.num_tasks = num_tasks;
- set_num_tasks_isSet(true);
- }
-
- public void unset_num_tasks() {
- __isset_bit_vector.clear(__NUM_TASKS_ISSET_ID);
- }
-
- /** Returns true if field num_tasks is set (has been assigned a value) and false otherwise */
- public boolean is_set_num_tasks() {
- return __isset_bit_vector.get(__NUM_TASKS_ISSET_ID);
- }
-
- public void set_num_tasks_isSet(boolean value) {
- __isset_bit_vector.set(__NUM_TASKS_ISSET_ID, value);
- }
-
- public int get_num_workers() {
- return this.num_workers;
- }
-
- public void set_num_workers(int num_workers) {
- this.num_workers = num_workers;
- set_num_workers_isSet(true);
- }
-
- public void unset_num_workers() {
- __isset_bit_vector.clear(__NUM_WORKERS_ISSET_ID);
- }
-
- /** Returns true if field num_workers is set (has been assigned a value) and false otherwise */
- public boolean is_set_num_workers() {
- return __isset_bit_vector.get(__NUM_WORKERS_ISSET_ID);
- }
-
- public void set_num_workers_isSet(boolean value) {
- __isset_bit_vector.set(__NUM_WORKERS_ISSET_ID, value);
- }
-
- public String get_error_info() {
- return this.error_info;
- }
-
- public void set_error_info(String error_info) {
- this.error_info = error_info;
- }
-
- public void unset_error_info() {
- this.error_info = null;
- }
-
- /** Returns true if field error_info is set (has been assigned a value) and false otherwise */
- public boolean is_set_error_info() {
- return this.error_info != null;
- }
-
- public void set_error_info_isSet(boolean value) {
- if (!value) {
- this.error_info = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case ID:
- if (value == null) {
- unset_id();
- } else {
- set_id((String)value);
- }
- break;
-
- case NAME:
- if (value == null) {
- unset_name();
- } else {
- set_name((String)value);
- }
- break;
-
- case STATUS:
- if (value == null) {
- unset_status();
- } else {
- set_status((String)value);
- }
- break;
-
- case UPTIME_SECS:
- if (value == null) {
- unset_uptime_secs();
- } else {
- set_uptime_secs((Integer)value);
- }
- break;
-
- case NUM_TASKS:
- if (value == null) {
- unset_num_tasks();
- } else {
- set_num_tasks((Integer)value);
- }
- break;
-
- case NUM_WORKERS:
- if (value == null) {
- unset_num_workers();
- } else {
- set_num_workers((Integer)value);
- }
- break;
-
- case ERROR_INFO:
- if (value == null) {
- unset_error_info();
- } else {
- set_error_info((String)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case ID:
- return get_id();
-
- case NAME:
- return get_name();
-
- case STATUS:
- return get_status();
-
- case UPTIME_SECS:
- return Integer.valueOf(get_uptime_secs());
-
- case NUM_TASKS:
- return Integer.valueOf(get_num_tasks());
-
- case NUM_WORKERS:
- return Integer.valueOf(get_num_workers());
-
- case ERROR_INFO:
- return get_error_info();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case ID:
- return is_set_id();
- case NAME:
- return is_set_name();
- case STATUS:
- return is_set_status();
- case UPTIME_SECS:
- return is_set_uptime_secs();
- case NUM_TASKS:
- return is_set_num_tasks();
- case NUM_WORKERS:
- return is_set_num_workers();
- case ERROR_INFO:
- return is_set_error_info();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TopologySummary)
- return this.equals((TopologySummary)that);
- return false;
- }
-
- public boolean equals(TopologySummary that) {
- if (that == null)
- return false;
-
- boolean this_present_id = true && this.is_set_id();
- boolean that_present_id = true && that.is_set_id();
- if (this_present_id || that_present_id) {
- if (!(this_present_id && that_present_id))
- return false;
- if (!this.id.equals(that.id))
- return false;
- }
-
- boolean this_present_name = true && this.is_set_name();
- boolean that_present_name = true && that.is_set_name();
- if (this_present_name || that_present_name) {
- if (!(this_present_name && that_present_name))
- return false;
- if (!this.name.equals(that.name))
- return false;
- }
-
- boolean this_present_status = true && this.is_set_status();
- boolean that_present_status = true && that.is_set_status();
- if (this_present_status || that_present_status) {
- if (!(this_present_status && that_present_status))
- return false;
- if (!this.status.equals(that.status))
- return false;
- }
-
- boolean this_present_uptime_secs = true;
- boolean that_present_uptime_secs = true;
- if (this_present_uptime_secs || that_present_uptime_secs) {
- if (!(this_present_uptime_secs && that_present_uptime_secs))
- return false;
- if (this.uptime_secs != that.uptime_secs)
- return false;
- }
-
- boolean this_present_num_tasks = true;
- boolean that_present_num_tasks = true;
- if (this_present_num_tasks || that_present_num_tasks) {
- if (!(this_present_num_tasks && that_present_num_tasks))
- return false;
- if (this.num_tasks != that.num_tasks)
- return false;
- }
-
- boolean this_present_num_workers = true;
- boolean that_present_num_workers = true;
- if (this_present_num_workers || that_present_num_workers) {
- if (!(this_present_num_workers && that_present_num_workers))
- return false;
- if (this.num_workers != that.num_workers)
- return false;
- }
-
- boolean this_present_error_info = true && this.is_set_error_info();
- boolean that_present_error_info = true && that.is_set_error_info();
- if (this_present_error_info || that_present_error_info) {
- if (!(this_present_error_info && that_present_error_info))
- return false;
- if (!this.error_info.equals(that.error_info))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_id = true && (is_set_id());
- builder.append(present_id);
- if (present_id)
- builder.append(id);
-
- boolean present_name = true && (is_set_name());
- builder.append(present_name);
- if (present_name)
- builder.append(name);
-
- boolean present_status = true && (is_set_status());
- builder.append(present_status);
- if (present_status)
- builder.append(status);
-
- boolean present_uptime_secs = true;
- builder.append(present_uptime_secs);
- if (present_uptime_secs)
- builder.append(uptime_secs);
-
- boolean present_num_tasks = true;
- builder.append(present_num_tasks);
- if (present_num_tasks)
- builder.append(num_tasks);
-
- boolean present_num_workers = true;
- builder.append(present_num_workers);
- if (present_num_workers)
- builder.append(num_workers);
-
- boolean present_error_info = true && (is_set_error_info());
- builder.append(present_error_info);
- if (present_error_info)
- builder.append(error_info);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TopologySummary other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TopologySummary typedOther = (TopologySummary)other;
-
- lastComparison = Boolean.valueOf(is_set_id()).compareTo(typedOther.is_set_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.id, typedOther.id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_name()).compareTo(typedOther.is_set_name());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_name()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.name, typedOther.name);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_status()).compareTo(typedOther.is_set_status());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_status()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.status, typedOther.status);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_uptime_secs()).compareTo(typedOther.is_set_uptime_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_uptime_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.uptime_secs, typedOther.uptime_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_num_tasks()).compareTo(typedOther.is_set_num_tasks());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_num_tasks()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.num_tasks, typedOther.num_tasks);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_num_workers()).compareTo(typedOther.is_set_num_workers());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_num_workers()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.num_workers, typedOther.num_workers);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_error_info()).compareTo(typedOther.is_set_error_info());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_error_info()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.error_info, typedOther.error_info);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // NAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.name = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // STATUS
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.status = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // UPTIME_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.uptime_secs = iprot.readI32();
- set_uptime_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // NUM_TASKS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.num_tasks = iprot.readI32();
- set_num_tasks_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 6: // NUM_WORKERS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.num_workers = iprot.readI32();
- set_num_workers_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 7: // ERROR_INFO
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.error_info = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.id != null) {
- oprot.writeFieldBegin(ID_FIELD_DESC);
- oprot.writeString(this.id);
- oprot.writeFieldEnd();
- }
- if (this.name != null) {
- oprot.writeFieldBegin(NAME_FIELD_DESC);
- oprot.writeString(this.name);
- oprot.writeFieldEnd();
- }
- if (this.status != null) {
- oprot.writeFieldBegin(STATUS_FIELD_DESC);
- oprot.writeString(this.status);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(UPTIME_SECS_FIELD_DESC);
- oprot.writeI32(this.uptime_secs);
- oprot.writeFieldEnd();
- oprot.writeFieldBegin(NUM_TASKS_FIELD_DESC);
- oprot.writeI32(this.num_tasks);
- oprot.writeFieldEnd();
- oprot.writeFieldBegin(NUM_WORKERS_FIELD_DESC);
- oprot.writeI32(this.num_workers);
- oprot.writeFieldEnd();
- if (this.error_info != null) {
- oprot.writeFieldBegin(ERROR_INFO_FIELD_DESC);
- oprot.writeString(this.error_info);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TopologySummary(");
- boolean first = true;
-
- sb.append("id:");
- if (this.id == null) {
- sb.append("null");
- } else {
- sb.append(this.id);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("name:");
- if (this.name == null) {
- sb.append("null");
- } else {
- sb.append(this.name);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("status:");
- if (this.status == null) {
- sb.append("null");
- } else {
- sb.append(this.status);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("uptime_secs:");
- sb.append(this.uptime_secs);
- first = false;
- if (!first) sb.append(", ");
- sb.append("num_tasks:");
- sb.append(this.num_tasks);
- first = false;
- if (!first) sb.append(", ");
- sb.append("num_workers:");
- sb.append(this.num_workers);
- first = false;
- if (!first) sb.append(", ");
- sb.append("error_info:");
- if (this.error_info == null) {
- sb.append("null");
- } else {
- sb.append(this.error_info);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'id' is unset! Struct:" + toString());
- }
-
- if (!is_set_name()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'name' is unset! Struct:" + toString());
- }
-
- if (!is_set_status()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'status' is unset! Struct:" + toString());
- }
-
- if (!is_set_uptime_secs()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'uptime_secs' is unset! Struct:" + toString());
- }
-
- if (!is_set_num_tasks()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'num_tasks' is unset! Struct:" + toString());
- }
-
- if (!is_set_num_workers()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'num_workers' is unset! Struct:" + toString());
- }
-
- if (!is_set_error_info()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'error_info' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/UserDefMetric.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/UserDefMetric.java b/jstorm-client/src/main/java/backtype/storm/generated/UserDefMetric.java
deleted file mode 100644
index ddf2de3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/UserDefMetric.java
+++ /dev/null
@@ -1,522 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class UserDefMetric implements org.apache.thrift7.TBase<UserDefMetric, UserDefMetric._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("UserDefMetric");
-
- private static final org.apache.thrift7.protocol.TField TYPE_FIELD_DESC = new org.apache.thrift7.protocol.TField("type", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField NAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("name", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField VALUE_FIELD_DESC = new org.apache.thrift7.protocol.TField("value", org.apache.thrift7.protocol.TType.DOUBLE, (short)3);
-
- private String type; // required
- private String name; // required
- private double value; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- TYPE((short)1, "type"),
- NAME((short)2, "name"),
- VALUE((short)3, "value");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // TYPE
- return TYPE;
- case 2: // NAME
- return NAME;
- case 3: // VALUE
- return VALUE;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __VALUE_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TYPE, new org.apache.thrift7.meta_data.FieldMetaData("type", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.NAME, new org.apache.thrift7.meta_data.FieldMetaData("name", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.VALUE, new org.apache.thrift7.meta_data.FieldMetaData("value", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(UserDefMetric.class, metaDataMap);
- }
-
- public UserDefMetric() {
- }
-
- public UserDefMetric(
- String type,
- String name,
- double value)
- {
- this();
- this.type = type;
- this.name = name;
- this.value = value;
- set_value_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public UserDefMetric(UserDefMetric other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_type()) {
- this.type = other.type;
- }
- if (other.is_set_name()) {
- this.name = other.name;
- }
- this.value = other.value;
- }
-
- public UserDefMetric deepCopy() {
- return new UserDefMetric(this);
- }
-
- @Override
- public void clear() {
- this.type = null;
- this.name = null;
- set_value_isSet(false);
- this.value = 0.0;
- }
-
- public String get_type() {
- return this.type;
- }
-
- public void set_type(String type) {
- this.type = type;
- }
-
- public void unset_type() {
- this.type = null;
- }
-
- /** Returns true if field type is set (has been assigned a value) and false otherwise */
- public boolean is_set_type() {
- return this.type != null;
- }
-
- public void set_type_isSet(boolean value) {
- if (!value) {
- this.type = null;
- }
- }
-
- public String get_name() {
- return this.name;
- }
-
- public void set_name(String name) {
- this.name = name;
- }
-
- public void unset_name() {
- this.name = null;
- }
-
- /** Returns true if field name is set (has been assigned a value) and false otherwise */
- public boolean is_set_name() {
- return this.name != null;
- }
-
- public void set_name_isSet(boolean value) {
- if (!value) {
- this.name = null;
- }
- }
-
- public double get_value() {
- return this.value;
- }
-
- public void set_value(double value) {
- this.value = value;
- set_value_isSet(true);
- }
-
- public void unset_value() {
- __isset_bit_vector.clear(__VALUE_ISSET_ID);
- }
-
- /** Returns true if field value is set (has been assigned a value) and false otherwise */
- public boolean is_set_value() {
- return __isset_bit_vector.get(__VALUE_ISSET_ID);
- }
-
- public void set_value_isSet(boolean value) {
- __isset_bit_vector.set(__VALUE_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case TYPE:
- if (value == null) {
- unset_type();
- } else {
- set_type((String)value);
- }
- break;
-
- case NAME:
- if (value == null) {
- unset_name();
- } else {
- set_name((String)value);
- }
- break;
-
- case VALUE:
- if (value == null) {
- unset_value();
- } else {
- set_value((Double)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case TYPE:
- return get_type();
-
- case NAME:
- return get_name();
-
- case VALUE:
- return Double.valueOf(get_value());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case TYPE:
- return is_set_type();
- case NAME:
- return is_set_name();
- case VALUE:
- return is_set_value();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof UserDefMetric)
- return this.equals((UserDefMetric)that);
- return false;
- }
-
- public boolean equals(UserDefMetric that) {
- if (that == null)
- return false;
-
- boolean this_present_type = true && this.is_set_type();
- boolean that_present_type = true && that.is_set_type();
- if (this_present_type || that_present_type) {
- if (!(this_present_type && that_present_type))
- return false;
- if (!this.type.equals(that.type))
- return false;
- }
-
- boolean this_present_name = true && this.is_set_name();
- boolean that_present_name = true && that.is_set_name();
- if (this_present_name || that_present_name) {
- if (!(this_present_name && that_present_name))
- return false;
- if (!this.name.equals(that.name))
- return false;
- }
-
- boolean this_present_value = true;
- boolean that_present_value = true;
- if (this_present_value || that_present_value) {
- if (!(this_present_value && that_present_value))
- return false;
- if (this.value != that.value)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_type = true && (is_set_type());
- builder.append(present_type);
- if (present_type)
- builder.append(type);
-
- boolean present_name = true && (is_set_name());
- builder.append(present_name);
- if (present_name)
- builder.append(name);
-
- boolean present_value = true;
- builder.append(present_value);
- if (present_value)
- builder.append(value);
-
- return builder.toHashCode();
- }
-
- public int compareTo(UserDefMetric other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- UserDefMetric typedOther = (UserDefMetric)other;
-
- lastComparison = Boolean.valueOf(is_set_type()).compareTo(typedOther.is_set_type());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_type()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.type, typedOther.type);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_name()).compareTo(typedOther.is_set_name());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_name()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.name, typedOther.name);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_value()).compareTo(typedOther.is_set_value());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_value()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.value, typedOther.value);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // TYPE
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.type = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // NAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.name = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // VALUE
- if (field.type == org.apache.thrift7.protocol.TType.DOUBLE) {
- this.value = iprot.readDouble();
- set_value_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.type != null) {
- oprot.writeFieldBegin(TYPE_FIELD_DESC);
- oprot.writeString(this.type);
- oprot.writeFieldEnd();
- }
- if (this.name != null) {
- oprot.writeFieldBegin(NAME_FIELD_DESC);
- oprot.writeString(this.name);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(VALUE_FIELD_DESC);
- oprot.writeDouble(this.value);
- oprot.writeFieldEnd();
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("UserDefMetric(");
- boolean first = true;
-
- sb.append("type:");
- if (this.type == null) {
- sb.append("null");
- } else {
- sb.append(this.type);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("name:");
- if (this.name == null) {
- sb.append("null");
- } else {
- sb.append(this.name);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("value:");
- sb.append(this.value);
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_type()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'type' is unset! Struct:" + toString());
- }
-
- if (!is_set_name()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'name' is unset! Struct:" + toString());
- }
-
- if (!is_set_value()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'value' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/WorkerMetricData.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/WorkerMetricData.java b/jstorm-client/src/main/java/backtype/storm/generated/WorkerMetricData.java
deleted file mode 100644
index e9bc073..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/WorkerMetricData.java
+++ /dev/null
@@ -1,1135 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class WorkerMetricData implements org.apache.thrift7.TBase<WorkerMetricData, WorkerMetricData._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("WorkerMetricData");
-
- private static final org.apache.thrift7.protocol.TField HOSTNAME_FIELD_DESC = new org.apache.thrift7.protocol.TField("hostname", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField PORT_FIELD_DESC = new org.apache.thrift7.protocol.TField("port", org.apache.thrift7.protocol.TType.I32, (short)2);
- private static final org.apache.thrift7.protocol.TField GAUGE_FIELD_DESC = new org.apache.thrift7.protocol.TField("gauge", org.apache.thrift7.protocol.TType.MAP, (short)3);
- private static final org.apache.thrift7.protocol.TField COUNTER_FIELD_DESC = new org.apache.thrift7.protocol.TField("counter", org.apache.thrift7.protocol.TType.MAP, (short)4);
- private static final org.apache.thrift7.protocol.TField METER_FIELD_DESC = new org.apache.thrift7.protocol.TField("meter", org.apache.thrift7.protocol.TType.MAP, (short)5);
- private static final org.apache.thrift7.protocol.TField TIMER_FIELD_DESC = new org.apache.thrift7.protocol.TField("timer", org.apache.thrift7.protocol.TType.MAP, (short)6);
- private static final org.apache.thrift7.protocol.TField HISTOGRAM_FIELD_DESC = new org.apache.thrift7.protocol.TField("histogram", org.apache.thrift7.protocol.TType.MAP, (short)7);
-
- private String hostname; // required
- private int port; // required
- private Map<String,Double> gauge; // required
- private Map<String,Double> counter; // required
- private Map<String,Double> meter; // required
- private Map<String,Double> timer; // required
- private Map<String,Double> histogram; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- HOSTNAME((short)1, "hostname"),
- PORT((short)2, "port"),
- GAUGE((short)3, "gauge"),
- COUNTER((short)4, "counter"),
- METER((short)5, "meter"),
- TIMER((short)6, "timer"),
- HISTOGRAM((short)7, "histogram");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // HOSTNAME
- return HOSTNAME;
- case 2: // PORT
- return PORT;
- case 3: // GAUGE
- return GAUGE;
- case 4: // COUNTER
- return COUNTER;
- case 5: // METER
- return METER;
- case 6: // TIMER
- return TIMER;
- case 7: // HISTOGRAM
- return HISTOGRAM;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __PORT_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.HOSTNAME, new org.apache.thrift7.meta_data.FieldMetaData("hostname", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.PORT, new org.apache.thrift7.meta_data.FieldMetaData("port", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.GAUGE, new org.apache.thrift7.meta_data.FieldMetaData("gauge", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.COUNTER, new org.apache.thrift7.meta_data.FieldMetaData("counter", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.METER, new org.apache.thrift7.meta_data.FieldMetaData("meter", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.TIMER, new org.apache.thrift7.meta_data.FieldMetaData("timer", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.HISTOGRAM, new org.apache.thrift7.meta_data.FieldMetaData("histogram", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(WorkerMetricData.class, metaDataMap);
- }
-
- public WorkerMetricData() {
- }
-
- public WorkerMetricData(
- String hostname,
- int port,
- Map<String,Double> gauge,
- Map<String,Double> counter,
- Map<String,Double> meter,
- Map<String,Double> timer,
- Map<String,Double> histogram)
- {
- this();
- this.hostname = hostname;
- this.port = port;
- set_port_isSet(true);
- this.gauge = gauge;
- this.counter = counter;
- this.meter = meter;
- this.timer = timer;
- this.histogram = histogram;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public WorkerMetricData(WorkerMetricData other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_hostname()) {
- this.hostname = other.hostname;
- }
- this.port = other.port;
- if (other.is_set_gauge()) {
- Map<String,Double> __this__gauge = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.gauge.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__gauge_copy_key = other_element_key;
-
- Double __this__gauge_copy_value = other_element_value;
-
- __this__gauge.put(__this__gauge_copy_key, __this__gauge_copy_value);
- }
- this.gauge = __this__gauge;
- }
- if (other.is_set_counter()) {
- Map<String,Double> __this__counter = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.counter.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__counter_copy_key = other_element_key;
-
- Double __this__counter_copy_value = other_element_value;
-
- __this__counter.put(__this__counter_copy_key, __this__counter_copy_value);
- }
- this.counter = __this__counter;
- }
- if (other.is_set_meter()) {
- Map<String,Double> __this__meter = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.meter.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__meter_copy_key = other_element_key;
-
- Double __this__meter_copy_value = other_element_value;
-
- __this__meter.put(__this__meter_copy_key, __this__meter_copy_value);
- }
- this.meter = __this__meter;
- }
- if (other.is_set_timer()) {
- Map<String,Double> __this__timer = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.timer.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__timer_copy_key = other_element_key;
-
- Double __this__timer_copy_value = other_element_value;
-
- __this__timer.put(__this__timer_copy_key, __this__timer_copy_value);
- }
- this.timer = __this__timer;
- }
- if (other.is_set_histogram()) {
- Map<String,Double> __this__histogram = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.histogram.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__histogram_copy_key = other_element_key;
-
- Double __this__histogram_copy_value = other_element_value;
-
- __this__histogram.put(__this__histogram_copy_key, __this__histogram_copy_value);
- }
- this.histogram = __this__histogram;
- }
- }
-
- public WorkerMetricData deepCopy() {
- return new WorkerMetricData(this);
- }
-
- @Override
- public void clear() {
- this.hostname = null;
- set_port_isSet(false);
- this.port = 0;
- this.gauge = null;
- this.counter = null;
- this.meter = null;
- this.timer = null;
- this.histogram = null;
- }
-
- public String get_hostname() {
- return this.hostname;
- }
-
- public void set_hostname(String hostname) {
- this.hostname = hostname;
- }
-
- public void unset_hostname() {
- this.hostname = null;
- }
-
- /** Returns true if field hostname is set (has been assigned a value) and false otherwise */
- public boolean is_set_hostname() {
- return this.hostname != null;
- }
-
- public void set_hostname_isSet(boolean value) {
- if (!value) {
- this.hostname = null;
- }
- }
-
- public int get_port() {
- return this.port;
- }
-
- public void set_port(int port) {
- this.port = port;
- set_port_isSet(true);
- }
-
- public void unset_port() {
- __isset_bit_vector.clear(__PORT_ISSET_ID);
- }
-
- /** Returns true if field port is set (has been assigned a value) and false otherwise */
- public boolean is_set_port() {
- return __isset_bit_vector.get(__PORT_ISSET_ID);
- }
-
- public void set_port_isSet(boolean value) {
- __isset_bit_vector.set(__PORT_ISSET_ID, value);
- }
-
- public int get_gauge_size() {
- return (this.gauge == null) ? 0 : this.gauge.size();
- }
-
- public void put_to_gauge(String key, double val) {
- if (this.gauge == null) {
- this.gauge = new HashMap<String,Double>();
- }
- this.gauge.put(key, val);
- }
-
- public Map<String,Double> get_gauge() {
- return this.gauge;
- }
-
- public void set_gauge(Map<String,Double> gauge) {
- this.gauge = gauge;
- }
-
- public void unset_gauge() {
- this.gauge = null;
- }
-
- /** Returns true if field gauge is set (has been assigned a value) and false otherwise */
- public boolean is_set_gauge() {
- return this.gauge != null;
- }
-
- public void set_gauge_isSet(boolean value) {
- if (!value) {
- this.gauge = null;
- }
- }
-
- public int get_counter_size() {
- return (this.counter == null) ? 0 : this.counter.size();
- }
-
- public void put_to_counter(String key, double val) {
- if (this.counter == null) {
- this.counter = new HashMap<String,Double>();
- }
- this.counter.put(key, val);
- }
-
- public Map<String,Double> get_counter() {
- return this.counter;
- }
-
- public void set_counter(Map<String,Double> counter) {
- this.counter = counter;
- }
-
- public void unset_counter() {
- this.counter = null;
- }
-
- /** Returns true if field counter is set (has been assigned a value) and false otherwise */
- public boolean is_set_counter() {
- return this.counter != null;
- }
-
- public void set_counter_isSet(boolean value) {
- if (!value) {
- this.counter = null;
- }
- }
-
- public int get_meter_size() {
- return (this.meter == null) ? 0 : this.meter.size();
- }
-
- public void put_to_meter(String key, double val) {
- if (this.meter == null) {
- this.meter = new HashMap<String,Double>();
- }
- this.meter.put(key, val);
- }
-
- public Map<String,Double> get_meter() {
- return this.meter;
- }
-
- public void set_meter(Map<String,Double> meter) {
- this.meter = meter;
- }
-
- public void unset_meter() {
- this.meter = null;
- }
-
- /** Returns true if field meter is set (has been assigned a value) and false otherwise */
- public boolean is_set_meter() {
- return this.meter != null;
- }
-
- public void set_meter_isSet(boolean value) {
- if (!value) {
- this.meter = null;
- }
- }
-
- public int get_timer_size() {
- return (this.timer == null) ? 0 : this.timer.size();
- }
-
- public void put_to_timer(String key, double val) {
- if (this.timer == null) {
- this.timer = new HashMap<String,Double>();
- }
- this.timer.put(key, val);
- }
-
- public Map<String,Double> get_timer() {
- return this.timer;
- }
-
- public void set_timer(Map<String,Double> timer) {
- this.timer = timer;
- }
-
- public void unset_timer() {
- this.timer = null;
- }
-
- /** Returns true if field timer is set (has been assigned a value) and false otherwise */
- public boolean is_set_timer() {
- return this.timer != null;
- }
-
- public void set_timer_isSet(boolean value) {
- if (!value) {
- this.timer = null;
- }
- }
-
- public int get_histogram_size() {
- return (this.histogram == null) ? 0 : this.histogram.size();
- }
-
- public void put_to_histogram(String key, double val) {
- if (this.histogram == null) {
- this.histogram = new HashMap<String,Double>();
- }
- this.histogram.put(key, val);
- }
-
- public Map<String,Double> get_histogram() {
- return this.histogram;
- }
-
- public void set_histogram(Map<String,Double> histogram) {
- this.histogram = histogram;
- }
-
- public void unset_histogram() {
- this.histogram = null;
- }
-
- /** Returns true if field histogram is set (has been assigned a value) and false otherwise */
- public boolean is_set_histogram() {
- return this.histogram != null;
- }
-
- public void set_histogram_isSet(boolean value) {
- if (!value) {
- this.histogram = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case HOSTNAME:
- if (value == null) {
- unset_hostname();
- } else {
- set_hostname((String)value);
- }
- break;
-
- case PORT:
- if (value == null) {
- unset_port();
- } else {
- set_port((Integer)value);
- }
- break;
-
- case GAUGE:
- if (value == null) {
- unset_gauge();
- } else {
- set_gauge((Map<String,Double>)value);
- }
- break;
-
- case COUNTER:
- if (value == null) {
- unset_counter();
- } else {
- set_counter((Map<String,Double>)value);
- }
- break;
-
- case METER:
- if (value == null) {
- unset_meter();
- } else {
- set_meter((Map<String,Double>)value);
- }
- break;
-
- case TIMER:
- if (value == null) {
- unset_timer();
- } else {
- set_timer((Map<String,Double>)value);
- }
- break;
-
- case HISTOGRAM:
- if (value == null) {
- unset_histogram();
- } else {
- set_histogram((Map<String,Double>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case HOSTNAME:
- return get_hostname();
-
- case PORT:
- return Integer.valueOf(get_port());
-
- case GAUGE:
- return get_gauge();
-
- case COUNTER:
- return get_counter();
-
- case METER:
- return get_meter();
-
- case TIMER:
- return get_timer();
-
- case HISTOGRAM:
- return get_histogram();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case HOSTNAME:
- return is_set_hostname();
- case PORT:
- return is_set_port();
- case GAUGE:
- return is_set_gauge();
- case COUNTER:
- return is_set_counter();
- case METER:
- return is_set_meter();
- case TIMER:
- return is_set_timer();
- case HISTOGRAM:
- return is_set_histogram();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof WorkerMetricData)
- return this.equals((WorkerMetricData)that);
- return false;
- }
-
- public boolean equals(WorkerMetricData that) {
- if (that == null)
- return false;
-
- boolean this_present_hostname = true && this.is_set_hostname();
- boolean that_present_hostname = true && that.is_set_hostname();
- if (this_present_hostname || that_present_hostname) {
- if (!(this_present_hostname && that_present_hostname))
- return false;
- if (!this.hostname.equals(that.hostname))
- return false;
- }
-
- boolean this_present_port = true;
- boolean that_present_port = true;
- if (this_present_port || that_present_port) {
- if (!(this_present_port && that_present_port))
- return false;
- if (this.port != that.port)
- return false;
- }
-
- boolean this_present_gauge = true && this.is_set_gauge();
- boolean that_present_gauge = true && that.is_set_gauge();
- if (this_present_gauge || that_present_gauge) {
- if (!(this_present_gauge && that_present_gauge))
- return false;
- if (!this.gauge.equals(that.gauge))
- return false;
- }
-
- boolean this_present_counter = true && this.is_set_counter();
- boolean that_present_counter = true && that.is_set_counter();
- if (this_present_counter || that_present_counter) {
- if (!(this_present_counter && that_present_counter))
- return false;
- if (!this.counter.equals(that.counter))
- return false;
- }
-
- boolean this_present_meter = true && this.is_set_meter();
- boolean that_present_meter = true && that.is_set_meter();
- if (this_present_meter || that_present_meter) {
- if (!(this_present_meter && that_present_meter))
- return false;
- if (!this.meter.equals(that.meter))
- return false;
- }
-
- boolean this_present_timer = true && this.is_set_timer();
- boolean that_present_timer = true && that.is_set_timer();
- if (this_present_timer || that_present_timer) {
- if (!(this_present_timer && that_present_timer))
- return false;
- if (!this.timer.equals(that.timer))
- return false;
- }
-
- boolean this_present_histogram = true && this.is_set_histogram();
- boolean that_present_histogram = true && that.is_set_histogram();
- if (this_present_histogram || that_present_histogram) {
- if (!(this_present_histogram && that_present_histogram))
- return false;
- if (!this.histogram.equals(that.histogram))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_hostname = true && (is_set_hostname());
- builder.append(present_hostname);
- if (present_hostname)
- builder.append(hostname);
-
- boolean present_port = true;
- builder.append(present_port);
- if (present_port)
- builder.append(port);
-
- boolean present_gauge = true && (is_set_gauge());
- builder.append(present_gauge);
- if (present_gauge)
- builder.append(gauge);
-
- boolean present_counter = true && (is_set_counter());
- builder.append(present_counter);
- if (present_counter)
- builder.append(counter);
-
- boolean present_meter = true && (is_set_meter());
- builder.append(present_meter);
- if (present_meter)
- builder.append(meter);
-
- boolean present_timer = true && (is_set_timer());
- builder.append(present_timer);
- if (present_timer)
- builder.append(timer);
-
- boolean present_histogram = true && (is_set_histogram());
- builder.append(present_histogram);
- if (present_histogram)
- builder.append(histogram);
-
- return builder.toHashCode();
- }
-
- public int compareTo(WorkerMetricData other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- WorkerMetricData typedOther = (WorkerMetricData)other;
-
- lastComparison = Boolean.valueOf(is_set_hostname()).compareTo(typedOther.is_set_hostname());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_hostname()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.hostname, typedOther.hostname);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_port()).compareTo(typedOther.is_set_port());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_port()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.port, typedOther.port);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_gauge()).compareTo(typedOther.is_set_gauge());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_gauge()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.gauge, typedOther.gauge);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_counter()).compareTo(typedOther.is_set_counter());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_counter()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.counter, typedOther.counter);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_meter()).compareTo(typedOther.is_set_meter());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_meter()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.meter, typedOther.meter);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_timer()).compareTo(typedOther.is_set_timer());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_timer()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.timer, typedOther.timer);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_histogram()).compareTo(typedOther.is_set_histogram());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_histogram()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.histogram, typedOther.histogram);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // HOSTNAME
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.hostname = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // PORT
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.port = iprot.readI32();
- set_port_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // GAUGE
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map234 = iprot.readMapBegin();
- this.gauge = new HashMap<String,Double>(2*_map234.size);
- for (int _i235 = 0; _i235 < _map234.size; ++_i235)
- {
- String _key236; // required
- double _val237; // required
- _key236 = iprot.readString();
- _val237 = iprot.readDouble();
- this.gauge.put(_key236, _val237);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // COUNTER
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map238 = iprot.readMapBegin();
- this.counter = new HashMap<String,Double>(2*_map238.size);
- for (int _i239 = 0; _i239 < _map238.size; ++_i239)
- {
- String _key240; // required
- double _val241; // required
- _key240 = iprot.readString();
- _val241 = iprot.readDouble();
- this.counter.put(_key240, _val241);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // METER
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map242 = iprot.readMapBegin();
- this.meter = new HashMap<String,Double>(2*_map242.size);
- for (int _i243 = 0; _i243 < _map242.size; ++_i243)
- {
- String _key244; // required
- double _val245; // required
- _key244 = iprot.readString();
- _val245 = iprot.readDouble();
- this.meter.put(_key244, _val245);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 6: // TIMER
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map246 = iprot.readMapBegin();
- this.timer = new HashMap<String,Double>(2*_map246.size);
- for (int _i247 = 0; _i247 < _map246.size; ++_i247)
- {
- String _key248; // required
- double _val249; // required
- _key248 = iprot.readString();
- _val249 = iprot.readDouble();
- this.timer.put(_key248, _val249);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 7: // HISTOGRAM
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map250 = iprot.readMapBegin();
- this.histogram = new HashMap<String,Double>(2*_map250.size);
- for (int _i251 = 0; _i251 < _map250.size; ++_i251)
- {
- String _key252; // required
- double _val253; // required
- _key252 = iprot.readString();
- _val253 = iprot.readDouble();
- this.histogram.put(_key252, _val253);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.hostname != null) {
- oprot.writeFieldBegin(HOSTNAME_FIELD_DESC);
- oprot.writeString(this.hostname);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(PORT_FIELD_DESC);
- oprot.writeI32(this.port);
- oprot.writeFieldEnd();
- if (this.gauge != null) {
- oprot.writeFieldBegin(GAUGE_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.gauge.size()));
- for (Map.Entry<String, Double> _iter254 : this.gauge.entrySet())
- {
- oprot.writeString(_iter254.getKey());
- oprot.writeDouble(_iter254.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.counter != null) {
- oprot.writeFieldBegin(COUNTER_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.counter.size()));
- for (Map.Entry<String, Double> _iter255 : this.counter.entrySet())
- {
- oprot.writeString(_iter255.getKey());
- oprot.writeDouble(_iter255.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.meter != null) {
- oprot.writeFieldBegin(METER_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.meter.size()));
- for (Map.Entry<String, Double> _iter256 : this.meter.entrySet())
- {
- oprot.writeString(_iter256.getKey());
- oprot.writeDouble(_iter256.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.timer != null) {
- oprot.writeFieldBegin(TIMER_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.timer.size()));
- for (Map.Entry<String, Double> _iter257 : this.timer.entrySet())
- {
- oprot.writeString(_iter257.getKey());
- oprot.writeDouble(_iter257.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.histogram != null) {
- oprot.writeFieldBegin(HISTOGRAM_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.histogram.size()));
- for (Map.Entry<String, Double> _iter258 : this.histogram.entrySet())
- {
- oprot.writeString(_iter258.getKey());
- oprot.writeDouble(_iter258.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("WorkerMetricData(");
- boolean first = true;
-
- sb.append("hostname:");
- if (this.hostname == null) {
- sb.append("null");
- } else {
- sb.append(this.hostname);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("port:");
- sb.append(this.port);
- first = false;
- if (!first) sb.append(", ");
- sb.append("gauge:");
- if (this.gauge == null) {
- sb.append("null");
- } else {
- sb.append(this.gauge);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("counter:");
- if (this.counter == null) {
- sb.append("null");
- } else {
- sb.append(this.counter);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("meter:");
- if (this.meter == null) {
- sb.append("null");
- } else {
- sb.append(this.meter);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("timer:");
- if (this.timer == null) {
- sb.append("null");
- } else {
- sb.append(this.timer);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("histogram:");
- if (this.histogram == null) {
- sb.append("null");
- } else {
- sb.append(this.histogram);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_hostname()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'hostname' is unset! Struct:" + toString());
- }
-
- if (!is_set_port()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'port' is unset! Struct:" + toString());
- }
-
- if (!is_set_gauge()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'gauge' is unset! Struct:" + toString());
- }
-
- if (!is_set_counter()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'counter' is unset! Struct:" + toString());
- }
-
- if (!is_set_meter()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'meter' is unset! Struct:" + toString());
- }
-
- if (!is_set_timer()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'timer' is unset! Struct:" + toString());
- }
-
- if (!is_set_histogram()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'histogram' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
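The writeObject/readObject hooks above push the struct through the shaded compact protocol. For illustration only, a minimal in-memory round trip using that same write()/read() path might look like the sketch below; the no-arg constructor, the set_* mutators, and the backtype.storm.generated package are assumed from the usual Thrift 0.7 generation conventions and are not part of this diff.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.util.HashMap;

    import org.apache.thrift7.protocol.TCompactProtocol;
    import org.apache.thrift7.transport.TIOStreamTransport;

    import backtype.storm.generated.WorkerMetricData; // package assumed for this sketch

    public class WorkerMetricDataRoundTrip {
        public static void main(String[] args) throws Exception {
            // Assumed: generated no-arg constructor and set_* mutators.
            WorkerMetricData data = new WorkerMetricData();
            data.set_hostname("worker-1.example.com");
            data.set_port(6700);
            data.set_gauge(new HashMap<String, Double>());
            data.set_counter(new HashMap<String, Double>());
            data.set_meter(new HashMap<String, Double>());
            data.set_timer(new HashMap<String, Double>());
            data.set_histogram(new HashMap<String, Double>());

            // Write with the same protocol/transport pair used by writeObject above.
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            data.write(new TCompactProtocol(new TIOStreamTransport(bos)));

            // Read it back into a fresh struct; read() ends with validate(), which
            // enforces the seven required fields.
            WorkerMetricData copy = new WorkerMetricData();
            copy.read(new TCompactProtocol(new TIOStreamTransport(
                    new ByteArrayInputStream(bos.toByteArray()))));
            System.out.println(copy);
        }
    }
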
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/StateQueryProcessor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/StateQueryProcessor.java b/jstorm-client/src/main/java/storm/trident/planner/processor/StateQueryProcessor.java
deleted file mode 100644
index 878fb6c..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/StateQueryProcessor.java
+++ /dev/null
@@ -1,89 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.planner.ProcessorContext;
-import storm.trident.planner.TridentProcessor;
-import storm.trident.state.QueryFunction;
-import storm.trident.state.State;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTuple.Factory;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class StateQueryProcessor implements TridentProcessor {
- QueryFunction _function;
- State _state;
- String _stateId;
- TridentContext _context;
- Fields _inputFields;
- ProjectionFactory _projection;
- AppendCollector _collector;
-
- public StateQueryProcessor(String stateId, Fields inputFields, QueryFunction function) {
- _stateId = stateId;
- _function = function;
- _inputFields = inputFields;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, TridentContext tridentContext) {
- List<Factory> parents = tridentContext.getParentTupleFactories();
- if(parents.size()!=1) {
- throw new RuntimeException("State query operation can only have one parent");
- }
- _context = tridentContext;
- _state = (State) context.getTaskData(_stateId);
- _projection = new ProjectionFactory(parents.get(0), _inputFields);
- _collector = new AppendCollector(tridentContext);
- _function.prepare(conf, new TridentOperationContext(context, _projection));
- }
-
- @Override
- public void cleanup() {
- _function.cleanup();
- }
-
- @Override
- public void startBatch(ProcessorContext processorContext) {
- processorContext.state[_context.getStateIndex()] = new BatchState();
- }
-
- @Override
- public void execute(ProcessorContext processorContext, String streamId, TridentTuple tuple) {
- BatchState state = (BatchState) processorContext.state[_context.getStateIndex()];
- state.tuples.add(tuple);
- state.args.add(_projection.create(tuple));
- }
-
- @Override
- public void finishBatch(ProcessorContext processorContext) {
- BatchState state = (BatchState) processorContext.state[_context.getStateIndex()];
- if(!state.tuples.isEmpty()) {
- List<Object> results = _function.batchRetrieve(_state, state.args);
- if(results.size()!=state.tuples.size()) {
- throw new RuntimeException("Results size is different than argument size: " + results.size() + " vs " + state.tuples.size());
- }
- for(int i=0; i<state.tuples.size(); i++) {
- TridentTuple tuple = state.tuples.get(i);
- Object result = results.get(i);
- _collector.setContext(processorContext, tuple);
- _function.execute(_projection.create(tuple), result, _collector);
- }
- }
- }
-
- private static class BatchState {
- public List<TridentTuple> tuples = new ArrayList<TridentTuple>();
- public List<TridentTuple> args = new ArrayList<TridentTuple>();
- }
-
- @Override
- public Factory getOutputFactory() {
- return _collector.getOutputFactory();
- }
-}
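StateQueryProcessor above buffers a batch's tuples and hands them to QueryFunction.batchRetrieve, which must return exactly one result per input tuple (that is what the size check in finishBatch enforces). A minimal query function illustrating that contract might look like the following sketch; the in-memory state class is an assumption for the example, not an existing helper.

    import java.util.ArrayList;
    import java.util.List;

    import backtype.storm.tuple.Values;
    import storm.trident.operation.TridentCollector;
    import storm.trident.state.BaseQueryFunction;
    import storm.trident.state.ReadOnlyState;
    import storm.trident.tuple.TridentTuple;

    // Toy read-only state backed by a plain map (assumption for the sketch).
    class InMemoryCountState extends ReadOnlyState {
        final java.util.Map<String, Long> counts = new java.util.HashMap<String, Long>();
    }

    public class CountQueryFunction extends BaseQueryFunction<InMemoryCountState, Long> {
        @Override
        public List<Long> batchRetrieve(InMemoryCountState state, List<TridentTuple> args) {
            // Must return exactly one result per argument tuple, in the same order.
            List<Long> ret = new ArrayList<Long>(args.size());
            for (TridentTuple arg : args) {
                Long count = state.counts.get(arg.getString(0));
                ret.add(count == null ? 0L : count);
            }
            return ret;
        }

        @Override
        public void execute(TridentTuple tuple, Long result, TridentCollector collector) {
            // The emitted value becomes the field appended by the AppendCollector above.
            collector.emit(new Values(result));
        }
    }

From a topology this would typically be wired with something like stream.stateQuery(state, new Fields("word"), new CountQueryFunction(), new Fields("count")), with the state handle coming from a prior persistentAggregate or newStaticState call.
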
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/planner/processor/TridentContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/planner/processor/TridentContext.java b/jstorm-client/src/main/java/storm/trident/planner/processor/TridentContext.java
deleted file mode 100644
index ae39768..0000000
--- a/jstorm-client/src/main/java/storm/trident/planner/processor/TridentContext.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package storm.trident.planner.processor;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import storm.trident.planner.TupleReceiver;
-import storm.trident.tuple.TridentTuple.Factory;
-
-
-public class TridentContext {
- Fields selfFields;
- List<Factory> parentFactories;
- List<String> parentStreams;
- List<TupleReceiver> receivers;
- String outStreamId;
- int stateIndex;
- BatchOutputCollector collector;
-
- public TridentContext(Fields selfFields, List<Factory> parentFactories,
- List<String> parentStreams, List<TupleReceiver> receivers,
- String outStreamId, int stateIndex, BatchOutputCollector collector) {
- this.selfFields = selfFields;
- this.parentFactories = parentFactories;
- this.parentStreams = parentStreams;
- this.receivers = receivers;
- this.outStreamId = outStreamId;
- this.stateIndex = stateIndex;
- this.collector = collector;
- }
-
- public List<Factory> getParentTupleFactories() {
- return parentFactories;
- }
-
- public Fields getSelfOutputFields() {
- return selfFields;
- }
-
- public List<String> getParentStreams() {
- return parentStreams;
- }
-
- public List<TupleReceiver> getReceivers() {
- return receivers;
- }
-
- public String getOutStreamId() {
- return outStreamId;
- }
-
- public int getStateIndex() {
- return stateIndex;
- }
-
- //for reporting errors
- public BatchOutputCollector getDelegateCollector() {
- return collector;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/BatchSpoutExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/BatchSpoutExecutor.java b/jstorm-client/src/main/java/storm/trident/spout/BatchSpoutExecutor.java
deleted file mode 100644
index 4e5fc99..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/BatchSpoutExecutor.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import storm.trident.topology.TransactionAttempt;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-
-public class BatchSpoutExecutor implements ITridentSpout {
- public static class EmptyCoordinator implements BatchCoordinator {
- @Override
- public Object initializeTransaction(long txid, Object prevMetadata, Object currMetadata) {
- return null;
- }
-
- @Override
- public void close() {
- }
-
- @Override
- public void success(long txid) {
- }
-
- @Override
- public boolean isReady(long txid) {
- return true;
- }
- }
-
- public class BatchSpoutEmitter implements Emitter {
-
- @Override
- public void emitBatch(TransactionAttempt tx, Object coordinatorMeta, TridentCollector collector) {
- _spout.emitBatch(tx.getTransactionId(), collector);
- }
-
- @Override
- public void success(TransactionAttempt tx) {
- _spout.ack(tx.getTransactionId());
- }
-
- @Override
- public void close() {
- _spout.close();
- }
- }
-
- IBatchSpout _spout;
-
- public BatchSpoutExecutor(IBatchSpout spout) {
- _spout = spout;
- }
-
- @Override
- public BatchCoordinator getCoordinator(String txStateId, Map conf, TopologyContext context) {
- return new EmptyCoordinator();
- }
-
- @Override
- public Emitter getEmitter(String txStateId, Map conf, TopologyContext context) {
- _spout.open(conf, context);
- return new BatchSpoutEmitter();
- }
-
- @Override
- public Map getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
- @Override
- public Fields getOutputFields() {
- return _spout.getOutputFields();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/IBatchID.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/IBatchID.java b/jstorm-client/src/main/java/storm/trident/spout/IBatchID.java
deleted file mode 100644
index e41d8e2..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/IBatchID.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package storm.trident.spout;
-
-
-public interface IBatchID {
- Object getId();
- int getAttemptId();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/IBatchSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/IBatchSpout.java b/jstorm-client/src/main/java/storm/trident/spout/IBatchSpout.java
deleted file mode 100644
index 8034026..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/IBatchSpout.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.io.Serializable;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-
-public interface IBatchSpout extends Serializable {
- void open(Map conf, TopologyContext context);
- void emitBatch(long batchId, TridentCollector collector);
- void ack(long batchId);
- void close();
- Map getComponentConfiguration();
- Fields getOutputFields();
-}
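IBatchSpout is the simplest way to feed batches into Trident: BatchSpoutExecutor above wraps it with an EmptyCoordinator, so any replay guarantees are entirely up to the implementation. A minimal fixed-content implementation might look like this sketch (the field name and batch contents are made up for illustration):

    import java.util.HashMap;
    import java.util.Map;

    import backtype.storm.task.TopologyContext;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.operation.TridentCollector;
    import storm.trident.spout.IBatchSpout;

    public class FixedSentenceBatchSpout implements IBatchSpout {
        private final String[] sentences = {
            "the cow jumped over the moon",
            "an apple a day keeps the doctor away"
        };

        @Override
        public void open(Map conf, TopologyContext context) {
            // nothing to set up for this in-memory example
        }

        @Override
        public void emitBatch(long batchId, TridentCollector collector) {
            // Every batch re-emits the same fixed sentences; a real spout would
            // pull data from an external source keyed by batchId.
            for (String s : sentences) {
                collector.emit(new Values(s));
            }
        }

        @Override
        public void ack(long batchId) {
            // batch fully processed; nothing to clean up here
        }

        @Override
        public void close() {
        }

        @Override
        public Map getComponentConfiguration() {
            return new HashMap();
        }

        @Override
        public Fields getOutputFields() {
            return new Fields("sentence");
        }
    }
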
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/ICommitterTridentSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/ICommitterTridentSpout.java b/jstorm-client/src/main/java/storm/trident/spout/ICommitterTridentSpout.java
deleted file mode 100644
index e163c77..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/ICommitterTridentSpout.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import storm.trident.topology.TransactionAttempt;
-import java.util.Map;
-
-public interface ICommitterTridentSpout<X> extends ITridentSpout<X> {
- public interface Emitter extends ITridentSpout.Emitter {
- void commit(TransactionAttempt attempt);
- }
-
- @Override
- public Emitter getEmitter(String txStateId, Map conf, TopologyContext context);
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/IOpaquePartitionedTridentSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/IOpaquePartitionedTridentSpout.java b/jstorm-client/src/main/java/storm/trident/spout/IOpaquePartitionedTridentSpout.java
deleted file mode 100644
index 7a0e254..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/IOpaquePartitionedTridentSpout.java
+++ /dev/null
@@ -1,45 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.io.Serializable;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-import storm.trident.topology.TransactionAttempt;
-
-/**
- * This defines a transactional spout which does *not* necessarily
- * replay the same batch every time it emits a batch for a transaction id.
- *
- */
-public interface IOpaquePartitionedTridentSpout<Partitions, Partition extends ISpoutPartition, M> extends Serializable {
- public interface Coordinator<Partitions> {
- boolean isReady(long txid);
- Partitions getPartitionsForBatch();
- void close();
- }
-
- public interface Emitter<Partitions, Partition extends ISpoutPartition, M> {
- /**
- * Emit a batch of tuples for a partition/transaction.
- *
- * Return the metadata describing this batch that will be used as lastPartitionMeta
- * for defining the parameters of the next batch.
- */
- M emitPartitionBatch(TransactionAttempt tx, TridentCollector collector, Partition partition, M lastPartitionMeta);
-
- /**
- * This method is called when this task is responsible for a new set of partitions. Should be used
- * to manage things like connections to brokers.
- */
- void refreshPartitions(List<Partition> partitionResponsibilities);
- List<Partition> getOrderedPartitions(Partitions allPartitionInfo);
- void close();
- }
-
- Emitter<Partitions, Partition, M> getEmitter(Map conf, TopologyContext context);
- Coordinator getCoordinator(Map conf, TopologyContext context);
- Map getComponentConfiguration();
- Fields getOutputFields();
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/IPartitionedTridentSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/IPartitionedTridentSpout.java b/jstorm-client/src/main/java/storm/trident/spout/IPartitionedTridentSpout.java
deleted file mode 100644
index c27bdc9..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/IPartitionedTridentSpout.java
+++ /dev/null
@@ -1,60 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.io.Serializable;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-import storm.trident.topology.TransactionAttempt;
-
-/**
- * This interface defines a transactional spout that reads its tuples from a partitioned set of
- * brokers. It automates the storing of metadata for each partition to ensure that the same batch
- * is always emitted for the same transaction id. The partition metadata is stored in Zookeeper.
- */
-public interface IPartitionedTridentSpout<Partitions, Partition extends ISpoutPartition, T> extends Serializable {
- public interface Coordinator<Partitions> {
- /**
- * Return the partitions currently in the source of data. The idea is
- * that if a new partition is added and a prior transaction is replayed, it doesn't
- * emit tuples for the new partition because it knows what partitions were in
- * that transaction.
- */
- Partitions getPartitionsForBatch();
-
- boolean isReady(long txid);
-
- void close();
- }
-
- public interface Emitter<Partitions, Partition extends ISpoutPartition, X> {
-
- List<Partition> getOrderedPartitions(Partitions allPartitionInfo);
-
- /**
- * Emit a batch of tuples for a partition/transaction that's never been emitted before.
- * Return the metadata that can be used to reconstruct this partition/batch in the future.
- */
- X emitPartitionBatchNew(TransactionAttempt tx, TridentCollector collector, Partition partition, X lastPartitionMeta);
-
- /**
- * This method is called when this task is responsible for a new set of partitions. Should be used
- * to manage things like connections to brokers.
- */
- void refreshPartitions(List<Partition> partitionResponsibilities);
-
- /**
- * Emit a batch of tuples for a partition/transaction that has been emitted before, using
- * the metadata created when it was first emitted.
- */
- void emitPartitionBatch(TransactionAttempt tx, TridentCollector collector, Partition partition, X partitionMeta);
- void close();
- }
-
- Coordinator<Partitions> getCoordinator(Map conf, TopologyContext context);
- Emitter<Partitions, Partition, T> getEmitter(Map conf, TopologyContext context);
-
- Map getComponentConfiguration();
- Fields getOutputFields();
-}
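The Coordinator/Emitter split above means the coordinator only decides which partitions belong to a batch, while the emitters persist per-partition metadata in Zookeeper so a replayed transaction id re-emits the identical batch. As a sketch of the coordinator half only, with the Partitions type reduced to a plain partition count (the fixed-count source is an assumption for illustration):

    import storm.trident.spout.IPartitionedTridentSpout;

    // Sketch: the "Partitions" type parameter is just a partition count here.
    public class FixedCountCoordinator implements IPartitionedTridentSpout.Coordinator<Integer> {
        private final int numPartitions;

        public FixedCountCoordinator(int numPartitions) {
            this.numPartitions = numPartitions;
        }

        @Override
        public Integer getPartitionsForBatch() {
            // A real spout would query the broker; keeping this stable means a
            // replayed transaction always sees the same partition set.
            return numPartitions;
        }

        @Override
        public boolean isReady(long txid) {
            return true; // always ready to start the next batch
        }

        @Override
        public void close() {
        }
    }
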
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/ISpoutPartition.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/ISpoutPartition.java b/jstorm-client/src/main/java/storm/trident/spout/ISpoutPartition.java
deleted file mode 100644
index 38a561f..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/ISpoutPartition.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.spout;
-
-public interface ISpoutPartition {
- /**
- * This is used as a Zookeeper node path for storing metadata.
- */
- String getId();
-}
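Because getId() doubles as the Zookeeper node path for the partition's metadata, ids need to be stable across restarts and unique per partition. A minimal implementation might look like this sketch (the broker host and partition index fields are assumptions):

    import storm.trident.spout.ISpoutPartition;

    public class BrokerPartition implements ISpoutPartition {
        private final String host;
        private final int partition;

        public BrokerPartition(String host, int partition) {
            this.host = host;
            this.partition = partition;
        }

        @Override
        public String getId() {
            // Stable, unique id; stored as a node under the spout's transactional state.
            return host + "_" + partition;
        }
    }
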
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/ITridentSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/ITridentSpout.java b/jstorm-client/src/main/java/storm/trident/spout/ITridentSpout.java
deleted file mode 100644
index 37d8c11..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/ITridentSpout.java
+++ /dev/null
@@ -1,77 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import storm.trident.topology.TransactionAttempt;
-import backtype.storm.tuple.Fields;
-import java.io.Serializable;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-
-
-public interface ITridentSpout<T> extends Serializable {
- public interface BatchCoordinator<X> {
- /**
- * Create metadata for this particular transaction id which has never
- * been emitted before. The metadata should contain whatever is necessary
- * to be able to replay the exact batch for the transaction at a later point.
- *
- * The metadata is stored in Zookeeper.
- *
- * Storm uses the Kryo serializations configured in the component configuration
- * for this spout to serialize and deserialize the metadata.
- *
- * @param txid The id of the transaction.
- * @param prevMetadata The metadata of the previous transaction
- * @param currMetadata The metadata for this transaction the last time it was initialized.
- * null if this is the first attempt
- * @return the metadata for this new transaction
- */
- X initializeTransaction(long txid, X prevMetadata, X currMetadata);
-
- void success(long txid);
-
- boolean isReady(long txid);
-
- /**
- * Release any resources from this coordinator.
- */
- void close();
- }
-
- public interface Emitter<X> {
- /**
- * Emit a batch for the specified transaction attempt and metadata for the transaction. The metadata
- * was created by the Coordinator in the initializeTransaction method. This method must always emit
- * the same batch of tuples across all tasks for the same transaction id.
- *
- */
- void emitBatch(TransactionAttempt tx, X coordinatorMeta, TridentCollector collector);
-
- /**
- * This attempt committed successfully, so all state for this commit and before can be safely cleaned up.
- */
- void success(TransactionAttempt tx);
-
- /**
- * Release any resources held by this emitter.
- */
- void close();
- }
-
- /**
- * The coordinator for a TransactionalSpout runs in a single thread and indicates when batches
- * of tuples should be emitted and when transactions should commit. The Coordinator that you provide
- * in a TransactionalSpout provides metadata for each transaction so that the transactions can be replayed.
- */
- BatchCoordinator<T> getCoordinator(String txStateId, Map conf, TopologyContext context);
-
- /**
- * The emitter for a TransactionalSpout runs as multiple tasks across the cluster. Emitters are responsible for
- * emitting batches of tuples for a transaction and must ensure that the same batch of tuples is always
- * emitted for the same transaction id.
- */
- Emitter<T> getEmitter(String txStateId, Map conf, TopologyContext context);
-
- Map getComponentConfiguration();
- Fields getOutputFields();
-}
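Any of these spouts is attached to a topology through TridentTopology.newStream; an IBatchSpout is wrapped in the BatchSpoutExecutor shown earlier, while an ITridentSpout is used directly. A hedged word-count sketch, assuming the FixedSentenceBatchSpout example above and the stock Trident helpers MemoryMapState and Count:

    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Values;
    import storm.trident.TridentTopology;
    import storm.trident.operation.BaseFunction;
    import storm.trident.operation.TridentCollector;
    import storm.trident.operation.builtin.Count;
    import storm.trident.testing.MemoryMapState;
    import storm.trident.tuple.TridentTuple;

    public class TridentWordCountSketch {

        // Minimal tokenizer for the sketch.
        public static class Split extends BaseFunction {
            @Override
            public void execute(TridentTuple tuple, TridentCollector collector) {
                for (String word : tuple.getString(0).split(" ")) {
                    collector.emit(new Values(word));
                }
            }
        }

        public static void main(String[] args) {
            TridentTopology topology = new TridentTopology();
            // newStream accepts an IBatchSpout; Trident wraps it in BatchSpoutExecutor.
            topology.newStream("sentences", new FixedSentenceBatchSpout())
                    .each(new Fields("sentence"), new Split(), new Fields("word"))
                    .groupBy(new Fields("word"))
                    .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));

            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("word-count-sketch", new Config(), topology.build());
        }
    }
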
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/OpaquePartitionedTridentSpoutExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/OpaquePartitionedTridentSpoutExecutor.java b/jstorm-client/src/main/java/storm/trident/spout/OpaquePartitionedTridentSpoutExecutor.java
deleted file mode 100644
index 0e02205..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/OpaquePartitionedTridentSpoutExecutor.java
+++ /dev/null
@@ -1,184 +0,0 @@
-package storm.trident.spout;
-
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.TreeMap;
-import storm.trident.operation.TridentCollector;
-import storm.trident.topology.state.RotatingTransactionalState;
-import storm.trident.topology.state.TransactionalState;
-import storm.trident.topology.TransactionAttempt;
-
-
-public class OpaquePartitionedTridentSpoutExecutor implements ICommitterTridentSpout<Object> {
- IOpaquePartitionedTridentSpout _spout;
-
- public class Coordinator implements ITridentSpout.BatchCoordinator<Object> {
- IOpaquePartitionedTridentSpout.Coordinator _coordinator;
-
- public Coordinator(Map conf, TopologyContext context) {
- _coordinator = _spout.getCoordinator(conf, context);
- }
-
- @Override
- public Object initializeTransaction(long txid, Object prevMetadata, Object currMetadata) {
- return _coordinator.getPartitionsForBatch();
- }
-
- @Override
- public void close() {
- _coordinator.close();
- }
-
- @Override
- public void success(long txid) {
- }
-
- @Override
- public boolean isReady(long txid) {
- return _coordinator.isReady(txid);
- }
- }
-
- static class EmitterPartitionState {
- public RotatingTransactionalState rotatingState;
- public ISpoutPartition partition;
-
- public EmitterPartitionState(RotatingTransactionalState s, ISpoutPartition p) {
- rotatingState = s;
- partition = p;
- }
- }
-
- public class Emitter implements ICommitterTridentSpout.Emitter {
- IOpaquePartitionedTridentSpout.Emitter _emitter;
- TransactionalState _state;
- TreeMap<Long, Map<String, Object>> _cachedMetas = new TreeMap<Long, Map<String, Object>>();
- Map<String, EmitterPartitionState> _partitionStates = new HashMap<String, EmitterPartitionState>();
- int _index;
- int _numTasks;
-
- public Emitter(String txStateId, Map conf, TopologyContext context) {
- _emitter = _spout.getEmitter(conf, context);
- _index = context.getThisTaskIndex();
- _numTasks = context.getComponentTasks(context.getThisComponentId()).size();
- _state = TransactionalState.newUserState(conf, txStateId);
- }
-
- Object _savedCoordinatorMeta = null;
- boolean _changedMeta = false;
-
- @Override
- public void emitBatch(TransactionAttempt tx, Object coordinatorMeta, TridentCollector collector) {
- if(_savedCoordinatorMeta==null || !_savedCoordinatorMeta.equals(coordinatorMeta)) {
- List<ISpoutPartition> partitions = _emitter.getOrderedPartitions(coordinatorMeta);
- _partitionStates.clear();
- List<ISpoutPartition> myPartitions = new ArrayList();
- for(int i=_index; i < partitions.size(); i+=_numTasks) {
- ISpoutPartition p = partitions.get(i);
- String id = p.getId();
- myPartitions.add(p);
- _partitionStates.put(id, new EmitterPartitionState(new RotatingTransactionalState(_state, id), p));
- }
- _emitter.refreshPartitions(myPartitions);
- _savedCoordinatorMeta = coordinatorMeta;
- _changedMeta = true;
- }
- Map<String, Object> metas = new HashMap<String, Object>();
- _cachedMetas.put(tx.getTransactionId(), metas);
-
- Entry<Long, Map<String, Object>> entry = _cachedMetas.lowerEntry(tx.getTransactionId());
- Map<String, Object> prevCached;
- if(entry!=null) {
- prevCached = entry.getValue();
- } else {
- prevCached = new HashMap<String, Object>();
- }
-
- for(String id: _partitionStates.keySet()) {
- EmitterPartitionState s = _partitionStates.get(id);
- s.rotatingState.removeState(tx.getTransactionId());
- Object lastMeta = prevCached.get(id);
- if(lastMeta==null) lastMeta = s.rotatingState.getLastState();
- Object meta = _emitter.emitPartitionBatch(tx, collector, s.partition, lastMeta);
- metas.put(id, meta);
- }
- }
-
- @Override
- public void success(TransactionAttempt tx) {
- for(EmitterPartitionState state: _partitionStates.values()) {
- state.rotatingState.cleanupBefore(tx.getTransactionId());
- }
- }
-
- @Override
- public void commit(TransactionAttempt attempt) {
- // this code here handles a case where a previous commit failed, and the partitions
- // changed since the last commit. This clears out any state for the removed partitions
- // for this txid.
- // we make sure only a single task ever does this. we're also guaranteed that
- // it's impossible for there to be another writer to the directory for that partition
- // because only a single commit can be happening at once. this is because in order for
- // another attempt of the batch to commit, the batch phase must have succeeded in between.
- // hence, all tasks for the prior commit must have finished committing (whether successfully or not)
- if(_changedMeta && _index==0) {
- Set<String> validIds = new HashSet<String>();
- for(ISpoutPartition p: (List<ISpoutPartition>) _emitter.getOrderedPartitions(_savedCoordinatorMeta)) {
- validIds.add(p.getId());
- }
- for(String existingPartition: _state.list("")) {
- if(!validIds.contains(existingPartition)) {
- RotatingTransactionalState s = new RotatingTransactionalState(_state, existingPartition);
- s.removeState(attempt.getTransactionId());
- }
- }
- _changedMeta = false;
- }
-
- Long txid = attempt.getTransactionId();
- Map<String, Object> metas = _cachedMetas.remove(txid);
- for(String partitionId: metas.keySet()) {
- Object meta = metas.get(partitionId);
- _partitionStates.get(partitionId).rotatingState.overrideState(txid, meta);
- }
- }
-
- @Override
- public void close() {
- _emitter.close();
- }
- }
-
- public OpaquePartitionedTridentSpoutExecutor(IOpaquePartitionedTridentSpout spout) {
- _spout = spout;
- }
-
- @Override
- public ITridentSpout.BatchCoordinator<Object> getCoordinator(String txStateId, Map conf, TopologyContext context) {
- return new Coordinator(conf, context);
- }
-
- @Override
- public ICommitterTridentSpout.Emitter getEmitter(String txStateId, Map conf, TopologyContext context) {
- return new Emitter(txStateId, conf, context);
- }
-
- @Override
- public Fields getOutputFields() {
- return _spout.getOutputFields();
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/PartitionedTridentSpoutExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/PartitionedTridentSpoutExecutor.java b/jstorm-client/src/main/java/storm/trident/spout/PartitionedTridentSpoutExecutor.java
deleted file mode 100644
index 484d275..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/PartitionedTridentSpoutExecutor.java
+++ /dev/null
@@ -1,154 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-import storm.trident.topology.TransactionAttempt;
-import storm.trident.topology.state.RotatingTransactionalState;
-import storm.trident.topology.state.TransactionalState;
-
-
-public class PartitionedTridentSpoutExecutor implements ITridentSpout<Integer> {
- IPartitionedTridentSpout _spout;
-
- public PartitionedTridentSpoutExecutor(IPartitionedTridentSpout spout) {
- _spout = spout;
- }
-
- public IPartitionedTridentSpout getPartitionedSpout() {
- return _spout;
- }
-
- class Coordinator implements ITridentSpout.BatchCoordinator<Object> {
- private IPartitionedTridentSpout.Coordinator _coordinator;
-
- public Coordinator(Map conf, TopologyContext context) {
- _coordinator = _spout.getCoordinator(conf, context);
- }
-
- @Override
- public Object initializeTransaction(long txid, Object prevMetadata, Object currMetadata) {
- if(currMetadata!=null) {
- return currMetadata;
- } else {
- return _coordinator.getPartitionsForBatch();
- }
- }
-
-
- @Override
- public void close() {
- _coordinator.close();
- }
-
- @Override
- public void success(long txid) {
- }
-
- @Override
- public boolean isReady(long txid) {
- return _coordinator.isReady(txid);
- }
- }
-
- static class EmitterPartitionState {
- public RotatingTransactionalState rotatingState;
- public ISpoutPartition partition;
-
- public EmitterPartitionState(RotatingTransactionalState s, ISpoutPartition p) {
- rotatingState = s;
- partition = p;
- }
- }
-
- class Emitter implements ITridentSpout.Emitter<Object> {
- private IPartitionedTridentSpout.Emitter _emitter;
- private TransactionalState _state;
- private Map<String, EmitterPartitionState> _partitionStates = new HashMap<String, EmitterPartitionState>();
- private int _index;
- private int _numTasks;
-
- public Emitter(String txStateId, Map conf, TopologyContext context) {
- _emitter = _spout.getEmitter(conf, context);
- _state = TransactionalState.newUserState(conf, txStateId);
- _index = context.getThisTaskIndex();
- _numTasks = context.getComponentTasks(context.getThisComponentId()).size();
- }
-
- Object _savedCoordinatorMeta = null;
-
-
- @Override
- public void emitBatch(final TransactionAttempt tx, final Object coordinatorMeta,
- final TridentCollector collector) {
- if(_savedCoordinatorMeta == null || !_savedCoordinatorMeta.equals(coordinatorMeta)) {
- List<ISpoutPartition> partitions = _emitter.getOrderedPartitions(coordinatorMeta);
- _partitionStates.clear();
- List<ISpoutPartition> myPartitions = new ArrayList();
- for(int i=_index; i < partitions.size(); i+=_numTasks) {
- ISpoutPartition p = partitions.get(i);
- String id = p.getId();
- myPartitions.add(p);
- _partitionStates.put(id, new EmitterPartitionState(new RotatingTransactionalState(_state, id), p));
- }
- _emitter.refreshPartitions(myPartitions);
- _savedCoordinatorMeta = coordinatorMeta;
- }
- for(EmitterPartitionState s: _partitionStates.values()) {
- RotatingTransactionalState state = s.rotatingState;
- final ISpoutPartition partition = s.partition;
- Object meta = state.getStateOrCreate(tx.getTransactionId(),
- new RotatingTransactionalState.StateInitializer() {
- @Override
- public Object init(long txid, Object lastState) {
- return _emitter.emitPartitionBatchNew(tx, collector, partition, lastState);
- }
- });
- // it's null if one of:
- // a) a later transaction batch was emitted before this, so we should skip this batch
- // b) it didn't exist and was created (in which case the StateInitializer was invoked and
- // the batch was already emitted)
- if(meta!=null) {
- _emitter.emitPartitionBatch(tx, collector, partition, meta);
- }
- }
- }
-
- @Override
- public void success(TransactionAttempt tx) {
- for(EmitterPartitionState state: _partitionStates.values()) {
- state.rotatingState.cleanupBefore(tx.getTransactionId());
- }
- }
-
- @Override
- public void close() {
- _state.close();
- _emitter.close();
- }
- }
-
- @Override
- public ITridentSpout.BatchCoordinator getCoordinator(String txStateId, Map conf, TopologyContext context) {
- return new Coordinator(conf, context);
- }
-
- @Override
- public ITridentSpout.Emitter getEmitter(String txStateId, Map conf, TopologyContext context) {
- return new Emitter(txStateId, conf, context);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
- @Override
- public Fields getOutputFields() {
- return _spout.getOutputFields();
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchExecutor.java b/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchExecutor.java
deleted file mode 100644
index f6a5a59..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchExecutor.java
+++ /dev/null
@@ -1,181 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.Config;
-import backtype.storm.spout.ISpoutOutputCollector;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.RotatingMap;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.TridentCollector;
-import storm.trident.topology.TransactionAttempt;
-import storm.trident.util.TridentUtils;
-
-public class RichSpoutBatchExecutor implements ITridentSpout {
- public static final String MAX_BATCH_SIZE_CONF = "topology.spout.max.batch.size";
-
- IRichSpout _spout;
-
- public RichSpoutBatchExecutor(IRichSpout spout) {
- _spout = spout;
- }
-
- @Override
- public Map getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
- @Override
- public Fields getOutputFields() {
- return TridentUtils.getSingleOutputStreamFields(_spout);
-
- }
-
- @Override
- public BatchCoordinator getCoordinator(String txStateId, Map conf, TopologyContext context) {
- return new RichSpoutCoordinator();
- }
-
- @Override
- public Emitter getEmitter(String txStateId, Map conf, TopologyContext context) {
- return new RichSpoutEmitter(conf, context);
- }
-
- class RichSpoutEmitter implements ITridentSpout.Emitter<Object> {
- int _maxBatchSize;
- boolean prepared = false;
- CaptureCollector _collector;
- RotatingMap<Long, List<Object>> idsMap;
- Map _conf;
- TopologyContext _context;
- long lastRotate = System.currentTimeMillis();
- long rotateTime;
-
- public RichSpoutEmitter(Map conf, TopologyContext context) {
- _conf = conf;
- _context = context;
- Number batchSize = (Number) conf.get(MAX_BATCH_SIZE_CONF);
- if(batchSize==null) batchSize = 1000;
- _maxBatchSize = batchSize.intValue();
- _collector = new CaptureCollector();
- idsMap = new RotatingMap(3);
- rotateTime = 1000L * ((Number)conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)).intValue();
- }
-
- @Override
- public void emitBatch(TransactionAttempt tx, Object coordinatorMeta, TridentCollector collector) {
- long txid = tx.getTransactionId();
-
- long now = System.currentTimeMillis();
- if(now - lastRotate > rotateTime) {
- Map<Long, List<Object>> failed = idsMap.rotate();
- for(Long id: failed.keySet()) {
- //TODO: this isn't right... it's not in the map anymore
- fail(id);
- }
- lastRotate = now;
- }
-
- if(idsMap.containsKey(txid)) {
- fail(txid);
- }
-
- _collector.reset(collector);
- if(!prepared) {
- _spout.open(_conf, _context, new SpoutOutputCollector(_collector));
- prepared = true;
- }
- for(int i=0; i<_maxBatchSize; i++) {
- _spout.nextTuple();
- if(_collector.numEmitted < i) {
- break;
- }
- }
- idsMap.put(txid, _collector.ids);
-
- }
-
- @Override
- public void success(TransactionAttempt tx) {
- ack(tx.getTransactionId());
- }
-
- private void ack(long batchId) {
- List<Object> ids = (List<Object>) idsMap.remove(batchId);
- if(ids!=null) {
- for(Object id: ids) {
- _spout.ack(id);
- }
- }
- }
-
- private void fail(long batchId) {
- List<Object> ids = (List<Object>) idsMap.remove(batchId);
- if(ids!=null) {
- for(Object id: ids) {
- _spout.fail(id);
- }
- }
- }
-
- @Override
- public void close() {
- }
-
- }
-
- class RichSpoutCoordinator implements ITridentSpout.BatchCoordinator {
- @Override
- public Object initializeTransaction(long txid, Object prevMetadata, Object currMetadata) {
- return null;
- }
-
- @Override
- public void success(long txid) {
- }
-
- @Override
- public boolean isReady(long txid) {
- return true;
- }
-
- @Override
- public void close() {
- }
- }
-
- static class CaptureCollector implements ISpoutOutputCollector {
-
- TridentCollector _collector;
- public List<Object> ids;
- public int numEmitted;
-
- public void reset(TridentCollector c) {
- _collector = c;
- ids = new ArrayList<Object>();
- }
-
- @Override
- public void reportError(Throwable t) {
- _collector.reportError(t);
- }
-
- @Override
- public List<Integer> emit(String stream, List<Object> values, Object id) {
- if(id!=null) ids.add(id);
- numEmitted++;
- _collector.emit(values);
- return null;
- }
-
- @Override
- public void emitDirect(int task, String stream, List<Object> values, Object id) {
- throw new UnsupportedOperationException("Trident does not support direct streams");
- }
-
- }
-
-}
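RichSpoutBatchExecutor caps each batch at MAX_BATCH_SIZE_CONF ("topology.spout.max.batch.size"), falling back to 1000 tuples when the key is unset, and fails batches whose pending ids rotate out after the message timeout. A short sketch of wiring an ordinary IRichSpout through TridentTopology.newStream and tuning that cap; the stream name is arbitrary and the spout is supplied by the caller:

    import backtype.storm.Config;
    import backtype.storm.topology.IRichSpout;
    import storm.trident.TridentTopology;
    import storm.trident.spout.RichSpoutBatchExecutor;

    public class RichSpoutBatchSizeConfig {
        // The caller supplies any IRichSpout; Trident wraps it for batching internally.
        public static Config configure(TridentTopology topology, IRichSpout spout) {
            topology.newStream("rich-spout-stream", spout);

            Config conf = new Config();
            // Limit each Trident batch pulled from the rich spout to 500 tuples
            // (RichSpoutEmitter above falls back to 1000 when this key is absent).
            conf.put(RichSpoutBatchExecutor.MAX_BATCH_SIZE_CONF, 500);
            return conf;
        }
    }
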
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchId.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchId.java b/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchId.java
deleted file mode 100644
index 1340d21..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchId.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package storm.trident.spout;
-
-public class RichSpoutBatchId implements IBatchID {
- long _id;
-
- public RichSpoutBatchId(long id) {
- _id = id;
- }
-
- @Override
- public Object getId() {
- // this is to distinguish from TransactionAttempt
- return this;
- }
-
- @Override
- public int getAttemptId() {
- return 0; // each drpc request is always a single attempt
- }
-
- @Override
- public int hashCode() {
- return ((Long) _id).hashCode();
- }
-
- @Override
- public boolean equals(Object o) {
- if(!(o instanceof RichSpoutBatchId)) return false;
- RichSpoutBatchId other = (RichSpoutBatchId) o;
- return _id == other._id;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchIdSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchIdSerializer.java b/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchIdSerializer.java
deleted file mode 100644
index d544fa7..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchIdSerializer.java
+++ /dev/null
@@ -1,21 +0,0 @@
-package storm.trident.spout;
-
-import com.esotericsoftware.kryo.Kryo;
-import com.esotericsoftware.kryo.Serializer;
-import com.esotericsoftware.kryo.io.Input;
-import com.esotericsoftware.kryo.io.Output;
-
-
-public class RichSpoutBatchIdSerializer extends Serializer<RichSpoutBatchId> {
-
- @Override
- public void write(Kryo kryo, Output output, RichSpoutBatchId id) {
- output.writeLong(id._id);
- }
-
- @Override
- public RichSpoutBatchId read(Kryo kryo, Input input, Class type) {
- long l = input.readLong();
- return new RichSpoutBatchId(l);
- }
-}
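Batch ids emitted by the rich-spout wrapper travel through the topology as tuple values, so this Kryo serializer has to be registered in the component configuration; RichSpoutBatchTriggerer.getComponentConfiguration below performs exactly that registration, and the same call can be made from user code:

    import java.util.HashMap;
    import java.util.Map;

    import backtype.storm.Config;
    import storm.trident.spout.RichSpoutBatchId;
    import storm.trident.spout.RichSpoutBatchIdSerializer;

    public class RichSpoutBatchIdRegistration {
        public static Map<String, Object> componentConf() {
            Map<String, Object> conf = new HashMap<String, Object>();
            // Same registration that RichSpoutBatchTriggerer.getComponentConfiguration performs.
            Config.registerSerialization(conf, RichSpoutBatchId.class, RichSpoutBatchIdSerializer.class);
            return conf;
        }
    }
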
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchTriggerer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchTriggerer.java b/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchTriggerer.java
deleted file mode 100644
index 39878a1..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/RichSpoutBatchTriggerer.java
+++ /dev/null
@@ -1,161 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.Config;
-import backtype.storm.generated.Grouping;
-import backtype.storm.spout.ISpoutOutputCollector;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import storm.trident.topology.TridentBoltExecutor;
-import storm.trident.tuple.ConsList;
-import storm.trident.util.TridentUtils;
-
-
-public class RichSpoutBatchTriggerer implements IRichSpout {
-
- String _stream;
- IRichSpout _delegate;
- List<Integer> _outputTasks;
- Random _rand;
- String _coordStream;
-
- public RichSpoutBatchTriggerer(IRichSpout delegate, String streamName, String batchGroup) {
- _delegate = delegate;
- _stream = streamName;
- _coordStream = TridentBoltExecutor.COORD_STREAM(batchGroup);
- }
-
- @Override
- public void open(Map conf, TopologyContext context, SpoutOutputCollector collector) {
- _delegate.open(conf, context, new SpoutOutputCollector(new StreamOverrideCollector(collector)));
- _outputTasks = new ArrayList<Integer>();
- for(String component: Utils.get(context.getThisTargets(),
- _coordStream,
- new HashMap<String, Grouping>()).keySet()) {
- _outputTasks.addAll(context.getComponentTasks(component));
- }
- _rand = new Random(Utils.secureRandomLong());
- }
-
- @Override
- public void close() {
- _delegate.close();
- }
-
- @Override
- public void activate() {
- _delegate.activate();
- }
-
- @Override
- public void deactivate() {
- _delegate.deactivate();
- }
-
- @Override
- public void nextTuple() {
- _delegate.nextTuple();
- }
-
- @Override
- public void ack(Object msgId) {
- Long batchId = _msgIdToBatchId.remove((Long) msgId);
- FinishCondition cond = _finishConditions.get(batchId);
- if(cond!=null) {
- cond.vals.remove((Long) msgId);
- if(cond.vals.isEmpty()) {
- _finishConditions.remove(batchId);
- _delegate.ack(cond.msgId);
- }
- }
- }
-
- @Override
- public void fail(Object msgId) {
- Long batchId = _msgIdToBatchId.remove((Long) msgId);
- FinishCondition cond = _finishConditions.remove(batchId);
- if(cond!=null) {
- _delegate.fail(cond.msgId);
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- Fields outFields = TridentUtils.getSingleOutputStreamFields(_delegate);
- outFields = TridentUtils.fieldsConcat(new Fields("$id$"), outFields);
- declarer.declareStream(_stream, outFields);
- // try to find a way to merge this code with what's already done in TridentBoltExecutor
- declarer.declareStream(_coordStream, true, new Fields("id", "count"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Map<String, Object> conf = _delegate.getComponentConfiguration();
- if(conf==null) conf = new HashMap();
- else conf = new HashMap(conf);
- Config.registerSerialization(conf, RichSpoutBatchId.class, RichSpoutBatchIdSerializer.class);
- return conf;
- }
-
- static class FinishCondition {
- Set<Long> vals = new HashSet<Long>();
- Object msgId;
- }
-
- Map<Long, Long> _msgIdToBatchId = new HashMap();
-
- Map<Long, FinishCondition> _finishConditions = new HashMap();
-
- class StreamOverrideCollector implements ISpoutOutputCollector {
-
- SpoutOutputCollector _collector;
-
- public StreamOverrideCollector(SpoutOutputCollector collector) {
- _collector = collector;
- }
-
- @Override
- public List<Integer> emit(String ignore, List<Object> values, Object msgId) {
- long batchIdVal = _rand.nextLong();
- Object batchId = new RichSpoutBatchId(batchIdVal);
- FinishCondition finish = new FinishCondition();
- finish.msgId = msgId;
- List<Integer> tasks = _collector.emit(_stream, new ConsList(batchId, values));
- Set<Integer> outTasksSet = new HashSet<Integer>(tasks);
- for(Integer t: _outputTasks) {
- int count = 0;
- if(outTasksSet.contains(t)) {
- count = 1;
- }
- long r = _rand.nextLong();
- _collector.emitDirect(t, _coordStream, new Values(batchId, count), r);
- finish.vals.add(r);
- _msgIdToBatchId.put(r, batchIdVal);
- }
- _finishConditions.put(batchIdVal, finish);
- return tasks;
- }
-
- @Override
- public void emitDirect(int task, String ignore, List<Object> values, Object msgId) {
- throw new RuntimeException("Trident does not support direct emits from spouts");
- }
-
- @Override
- public void reportError(Throwable t) {
- _collector.reportError(t);
- }
-
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutCoordinator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutCoordinator.java b/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutCoordinator.java
deleted file mode 100644
index 7095f36..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutCoordinator.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.Config;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicOutputCollector;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import storm.trident.topology.TransactionAttempt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.Map;
-import org.apache.log4j.Logger;
-import storm.trident.topology.MasterBatchCoordinator;
-import storm.trident.topology.state.RotatingTransactionalState;
-import storm.trident.topology.state.TransactionalState;
-
-
-public class TridentSpoutCoordinator implements IBasicBolt {
- public static final Logger LOG = Logger.getLogger(TridentSpoutCoordinator.class);
- private static final String META_DIR = "meta";
-
- ITridentSpout _spout;
- ITridentSpout.BatchCoordinator _coord;
- RotatingTransactionalState _state;
- TransactionalState _underlyingState;
- String _id;
-
-
- public TridentSpoutCoordinator(String id, ITridentSpout spout) {
- _spout = spout;
- _id = id;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context) {
- _coord = _spout.getCoordinator(_id, conf, context);
- _underlyingState = TransactionalState.newCoordinatorState(conf, _id);
- _state = new RotatingTransactionalState(_underlyingState, META_DIR);
- }
-
- @Override
- public void execute(Tuple tuple, BasicOutputCollector collector) {
- TransactionAttempt attempt = (TransactionAttempt) tuple.getValue(0);
-
- if(tuple.getSourceStreamId().equals(MasterBatchCoordinator.SUCCESS_STREAM_ID)) {
- _state.cleanupBefore(attempt.getTransactionId());
- _coord.success(attempt.getTransactionId());
- } else if (tuple.getSourceStreamId().equals(MasterBatchCoordinator.COMMIT_STREAM_ID)) {
- // Do nothing.
- } else {
- long txid = attempt.getTransactionId();
- Object prevMeta = _state.getPreviousState(txid);
- Object meta = _coord.initializeTransaction(txid, prevMeta, _state.getState(txid));
- _state.overrideState(txid, meta);
- collector.emit(MasterBatchCoordinator.BATCH_STREAM_ID, new Values(attempt, meta));
- }
-
- }
-
- @Override
- public void cleanup() {
- _coord.close();
- _underlyingState.close();
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declareStream(MasterBatchCoordinator.BATCH_STREAM_ID, new Fields("tx", "metadata"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- Config ret = new Config();
- ret.setMaxTaskParallelism(1);
- return ret;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutExecutor.java b/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutExecutor.java
deleted file mode 100644
index 10b40ae..0000000
--- a/jstorm-client/src/main/java/storm/trident/spout/TridentSpoutExecutor.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package storm.trident.spout;
-
-import backtype.storm.coordination.BatchOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import storm.trident.topology.TransactionAttempt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-import org.apache.log4j.Logger;
-import storm.trident.operation.TridentCollector;
-import storm.trident.topology.BatchInfo;
-import storm.trident.topology.ITridentBatchBolt;
-import storm.trident.topology.MasterBatchCoordinator;
-import storm.trident.tuple.ConsList;
-
-public class TridentSpoutExecutor implements ITridentBatchBolt {
- public static String ID_FIELD = "$tx";
-
- public static Logger LOG = Logger.getLogger(TridentSpoutExecutor.class);
-
- AddIdCollector _collector;
- ITridentSpout _spout;
- ITridentSpout.Emitter _emitter;
- String _streamName;
- String _txStateId;
-
- TreeMap<Long, TransactionAttempt> _activeBatches = new TreeMap<Long, TransactionAttempt>();
-
- public TridentSpoutExecutor(String txStateId, String streamName, ITridentSpout spout) {
- _txStateId = txStateId;
- _spout = spout;
- _streamName = streamName;
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context, BatchOutputCollector collector) {
- _emitter = _spout.getEmitter(_txStateId, conf, context);
- _collector = new AddIdCollector(_streamName, collector);
- }
-
- @Override
- public void execute(BatchInfo info, Tuple input) {
- // there won't be a BatchInfo for the success stream
- TransactionAttempt attempt = (TransactionAttempt) input.getValue(0);
- if(input.getSourceStreamId().equals(MasterBatchCoordinator.COMMIT_STREAM_ID)) {
- if(attempt.equals(_activeBatches.get(attempt.getTransactionId()))) {
- ((ICommitterTridentSpout.Emitter) _emitter).commit(attempt);
- _activeBatches.remove(attempt.getTransactionId());
- } else {
- throw new FailedException("Received commit for different transaction attempt");
- }
- } else if(input.getSourceStreamId().equals(MasterBatchCoordinator.SUCCESS_STREAM_ID)) {
- // valid to delete before what's been committed since
- // those batches will never be accessed again
- _activeBatches.headMap(attempt.getTransactionId()).clear();
- _emitter.success(attempt);
- } else {
- _collector.setBatch(info.batchId);
- _emitter.emitBatch(attempt, input.getValue(1), _collector);
- _activeBatches.put(attempt.getTransactionId(), attempt);
- }
- }
-
- @Override
- public void cleanup() {
- _emitter.close();
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- List<String> fields = new ArrayList(_spout.getOutputFields().toList());
- fields.add(0, ID_FIELD);
- declarer.declareStream(_streamName, new Fields(fields));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _spout.getComponentConfiguration();
- }
-
- @Override
- public void finishBatch(BatchInfo batchInfo) {
- }
-
- @Override
- public Object initBatchState(String batchGroup, Object batchId) {
- return null;
- }
-
- private static class AddIdCollector implements TridentCollector {
- BatchOutputCollector _delegate;
- Object _id;
- String _stream;
-
- public AddIdCollector(String stream, BatchOutputCollector c) {
- _delegate = c;
- _stream = stream;
- }
-
-
- public void setBatch(Object id) {
- _id = id;
- }
-
- @Override
- public void emit(List<Object> values) {
- _delegate.emit(_stream, new ConsList(_id, values));
- }
-
- @Override
- public void reportError(Throwable t) {
- _delegate.reportError(t);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/BaseQueryFunction.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/BaseQueryFunction.java b/jstorm-client/src/main/java/storm/trident/state/BaseQueryFunction.java
deleted file mode 100644
index 49be20e..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/BaseQueryFunction.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.state;
-
-import storm.trident.operation.BaseOperation;
-
-
-public abstract class BaseQueryFunction<S extends State, T> extends BaseOperation implements QueryFunction<S, T> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/BaseStateUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/BaseStateUpdater.java b/jstorm-client/src/main/java/storm/trident/state/BaseStateUpdater.java
deleted file mode 100644
index 292c59f..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/BaseStateUpdater.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.state;
-
-import storm.trident.operation.BaseOperation;
-
-
-public abstract class BaseStateUpdater<S extends State> extends BaseOperation implements StateUpdater<S> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/CombinerValueUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/CombinerValueUpdater.java b/jstorm-client/src/main/java/storm/trident/state/CombinerValueUpdater.java
deleted file mode 100644
index ea2248f..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/CombinerValueUpdater.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package storm.trident.state;
-
-import storm.trident.operation.CombinerAggregator;
-
-public class CombinerValueUpdater implements ValueUpdater<Object> {
- Object arg;
- CombinerAggregator agg;
-
- public CombinerValueUpdater(CombinerAggregator agg, Object arg) {
- this.agg = agg;
- this.arg = arg;
- }
-
- @Override
- public Object update(Object stored) {
- if(stored==null) return arg;
- else return agg.combine(stored, arg);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/ITupleCollection.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/ITupleCollection.java b/jstorm-client/src/main/java/storm/trident/state/ITupleCollection.java
deleted file mode 100644
index 23c1253..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/ITupleCollection.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.state;
-
-import java.util.Iterator;
-import java.util.List;
-
-/* Container of a collection of tuples */
-public interface ITupleCollection {
- public Iterator<List<Object>> getTuples();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/JSONNonTransactionalSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/JSONNonTransactionalSerializer.java b/jstorm-client/src/main/java/storm/trident/state/JSONNonTransactionalSerializer.java
deleted file mode 100644
index a289ddb..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/JSONNonTransactionalSerializer.java
+++ /dev/null
@@ -1,28 +0,0 @@
-package storm.trident.state;
-
-import java.io.UnsupportedEncodingException;
-
-import backtype.storm.utils.Utils;
-
-
-public class JSONNonTransactionalSerializer implements Serializer {
-
- @Override
- public byte[] serialize(Object obj) {
- try {
- return Utils.to_json(obj).getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public Object deserialize(byte[] b) {
- try {
- return Utils.from_json(new String(b, "UTF-8"));
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/JSONOpaqueSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/JSONOpaqueSerializer.java b/jstorm-client/src/main/java/storm/trident/state/JSONOpaqueSerializer.java
deleted file mode 100644
index a96d457..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/JSONOpaqueSerializer.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package storm.trident.state;
-
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.List;
-
-import backtype.storm.utils.Utils;
-
-
-public class JSONOpaqueSerializer implements Serializer<OpaqueValue> {
-
- @Override
- public byte[] serialize(OpaqueValue obj) {
- List toSer = new ArrayList(3);
- toSer.add(obj.currTxid);
- toSer.add(obj.curr);
- toSer.add(obj.prev);
- try {
- return Utils.to_json(toSer).getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public OpaqueValue deserialize(byte[] b) {
- try {
- String s = new String(b, "UTF-8");
- List deser = (List) Utils.from_json(s);
- return new OpaqueValue((Long) deser.get(0), deser.get(1), deser.get(2));
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/JSONTransactionalSerializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/JSONTransactionalSerializer.java b/jstorm-client/src/main/java/storm/trident/state/JSONTransactionalSerializer.java
deleted file mode 100644
index a0df3af..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/JSONTransactionalSerializer.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package storm.trident.state;
-
-import java.io.UnsupportedEncodingException;
-import java.util.ArrayList;
-import java.util.List;
-
-import backtype.storm.utils.Utils;
-
-
-public class JSONTransactionalSerializer implements Serializer<TransactionalValue> {
- @Override
- public byte[] serialize(TransactionalValue obj) {
- List toSer = new ArrayList(2);
- toSer.add(obj.getTxid());
- toSer.add(obj.getVal());
- try {
- return Utils.to_json(toSer).getBytes("UTF-8");
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public TransactionalValue deserialize(byte[] b) {
- try {
- String s = new String(b, "UTF-8");
- List deser = (List) Utils.from_json(s);
- return new TransactionalValue((Long) deser.get(0), deser.get(1));
- } catch (UnsupportedEncodingException e) {
- throw new RuntimeException(e);
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/OpaqueValue.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/OpaqueValue.java b/jstorm-client/src/main/java/storm/trident/state/OpaqueValue.java
deleted file mode 100644
index 14a39d4..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/OpaqueValue.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package storm.trident.state;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-
-public class OpaqueValue<T> {
- Long currTxid;
- T prev;
- T curr;
-
- public OpaqueValue(Long currTxid, T val, T prev) {
- this.curr = val;
- this.currTxid = currTxid;
- this.prev = prev;
- }
-
- public OpaqueValue(Long currTxid, T val) {
- this(currTxid, val, null);
- }
-
- public OpaqueValue<T> update(Long batchTxid, T newVal) {
- T prev;
- if(batchTxid==null || (this.currTxid < batchTxid)) {
- prev = this.curr;
- } else if(batchTxid.equals(this.currTxid)){
- prev = this.prev;
- } else {
- throw new RuntimeException("Current batch (" + batchTxid + ") is behind state's batch: " + this.toString());
- }
- return new OpaqueValue<T>(batchTxid, newVal, prev);
- }
-
- public T get(Long batchTxid) {
- if(batchTxid==null || (this.currTxid < batchTxid)) {
- return curr;
- } else if(batchTxid.equals(this.currTxid)){
- return prev;
- } else {
- throw new RuntimeException("Current batch (" + batchTxid + ") is behind state's batch: " + this.toString());
- }
- }
-
- public T getCurr() {
- return curr;
- }
-
- public Long getCurrTxid() {
- return currTxid;
- }
-
- public T getPrev() {
- return prev;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
-}
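
OpaqueValue above is the value wrapper behind opaque-transactional state: it stores the current value, the txid that produced it, and the value that preceded that txid. As a rough, illustrative sketch only (not part of this diff, and assuming the storm.trident.state classes removed in this commit are on the classpath), here is how those fields evolve across an update and a replayed batch:

    import storm.trident.state.OpaqueValue;

    public class OpaqueValueDemo {
        public static void main(String[] args) {
            // txid 1 commits the value 10; there is no previous value yet
            OpaqueValue<Long> v1 = new OpaqueValue<Long>(1L, 10L);

            // txid 2 moves the value to 15; the last committed value (10) is kept as prev
            OpaqueValue<Long> v2 = v1.update(2L, 15L);

            // txid 2 is replayed with a different batch and now produces 18; because the
            // txid matches, prev stays at 10, so the update can still be redone safely
            // starting from the value that preceded txid 2
            OpaqueValue<Long> v2b = v2.update(2L, 18L);

            System.out.println(v2b.getPrev()); // 10
            System.out.println(v2b.getCurr()); // 18
            System.out.println(v2b.get(2L));   // 10 -- reading "as of" txid 2 returns prev
            System.out.println(v2b.get(3L));   // 18 -- a later txid sees the current value
        }
    }
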
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/QueryFunction.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/QueryFunction.java b/jstorm-client/src/main/java/storm/trident/state/QueryFunction.java
deleted file mode 100644
index 38eb41f..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/QueryFunction.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package storm.trident.state;
-
-import java.util.List;
-import storm.trident.operation.EachOperation;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-
-public interface QueryFunction<S extends State, T> extends EachOperation {
- List<T> batchRetrieve(S state, List<TridentTuple> args);
- void execute(TridentTuple tuple, T result, TridentCollector collector);
-}
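
QueryFunction splits a state read into two steps: batchRetrieve answers a whole batch of query tuples at once (typically one round trip to the store), and execute is then called once per input tuple with its retrieved result. The following is only an illustrative sketch built on the BaseQueryFunction and State types from this commit; WordCountState and its lookup method are invented for the example:

    import java.util.ArrayList;
    import java.util.List;
    import backtype.storm.tuple.Values;
    import storm.trident.operation.TridentCollector;
    import storm.trident.state.BaseQueryFunction;
    import storm.trident.state.State;
    import storm.trident.tuple.TridentTuple;

    // Hypothetical query-only state; lookup stands in for a real store read.
    class WordCountState implements State {
        public void beginCommit(Long txid) {}
        public void commit(Long txid) {}
        Long lookup(Object word) { return 0L; }
    }

    class WordCountQuery extends BaseQueryFunction<WordCountState, Long> {
        @Override
        public List<Long> batchRetrieve(WordCountState state, List<TridentTuple> args) {
            // one pass over the whole batch of query tuples
            List<Long> ret = new ArrayList<Long>(args.size());
            for (TridentTuple t : args) {
                ret.add(state.lookup(t.getValue(0)));
            }
            return ret;
        }

        @Override
        public void execute(TridentTuple tuple, Long result, TridentCollector collector) {
            // called once per input tuple with the result retrieved for it
            collector.emit(new Values(result));
        }
    }
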
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/ReadOnlyState.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/ReadOnlyState.java b/jstorm-client/src/main/java/storm/trident/state/ReadOnlyState.java
deleted file mode 100644
index f8c62f0..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/ReadOnlyState.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.state;
-
-public class ReadOnlyState implements State {
-
- @Override
- public void beginCommit(Long txid) {
- throw new UnsupportedOperationException("This state is read-only and does not support updates");
- }
-
- @Override
- public void commit(Long txid) {
- throw new UnsupportedOperationException("This state is read-only and does not support updates");
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/ReducerValueUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/ReducerValueUpdater.java b/jstorm-client/src/main/java/storm/trident/state/ReducerValueUpdater.java
deleted file mode 100644
index 2ba3aec..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/ReducerValueUpdater.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package storm.trident.state;
-
-import java.util.List;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.tuple.TridentTuple;
-
-public class ReducerValueUpdater implements ValueUpdater<Object> {
- List<TridentTuple> tuples;
- ReducerAggregator agg;
-
- public ReducerValueUpdater(ReducerAggregator agg, List<TridentTuple> tuples) {
- this.agg = agg;
- this.tuples = tuples;
- }
-
- @Override
- public Object update(Object stored) {
- Object ret = (stored == null) ? this.agg.init() : stored;
- for(TridentTuple t: tuples) {
- ret = this.agg.reduce(ret, t);
- }
- return ret;
- }
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/Serializer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/Serializer.java b/jstorm-client/src/main/java/storm/trident/state/Serializer.java
deleted file mode 100644
index 9f91a38..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/Serializer.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.state;
-
-import java.io.Serializable;
-
-
-public interface Serializer<T> extends Serializable {
- byte[] serialize(T obj);
- T deserialize(byte[] b);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/State.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/State.java b/jstorm-client/src/main/java/storm/trident/state/State.java
deleted file mode 100644
index 93f7255..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/State.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package storm.trident.state;
-
-/**
- * There's 3 different kinds of state:
- *
- * 1. non-transactional: ignores commits, updates are permanent, no rollback. A Cassandra incrementing state would be like this.
- * 2. repeat-transactional: idempotent as long as all batches for a txid are identical
- * 3. opaque-transactional: the most general kind of state. updates are always done
- * based on the previous version of the value if the current commit = latest stored commit
- * Idempotent even if the batch for a txid can change.
- *
- * repeat transactional is idempotent for transactional spouts
- * opaque transactional is idempotent for opaque or transactional spouts
- *
- * Trident should log warnings when the state is idempotent but updates will not be idempotent
- * because of the spout
- */
-// retrieving is encapsulated in Retrieval interface
-public interface State {
- void beginCommit(Long txid); // can be null for things like partitionPersist occurring off a DRPC stream
- void commit(Long txid);
-}
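
As a rough sketch of the "repeat transactional" rule described in the comment above: the usual pattern is to store the txid next to the value and skip any update whose txid has already been applied, which is what makes replays of identical batches idempotent. The sketch uses the TransactionalValue class removed later in this commit; applyCount is an invented helper:

    import storm.trident.state.TransactionalValue;

    public class RepeatTransactionalSketch {
        // apply a per-batch count to the stored value, skipping txids already applied
        static TransactionalValue<Long> applyCount(TransactionalValue<Long> stored, long txid, long batchCount) {
            if (stored != null && stored.getTxid() != null && stored.getTxid().equals(txid)) {
                return stored; // this txid was already applied; an identical replay changes nothing
            }
            long base = (stored == null) ? 0L : stored.getVal();
            return new TransactionalValue<Long>(txid, base + batchCount);
        }

        public static void main(String[] args) {
            TransactionalValue<Long> v = applyCount(null, 1L, 5L); // txid 1: total = 5
            v = applyCount(v, 2L, 3L);                             // txid 2: total = 8
            v = applyCount(v, 2L, 3L);                             // replay of txid 2: still 8
            System.out.println(v.getVal());                        // 8
        }
    }
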
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/StateFactory.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/StateFactory.java b/jstorm-client/src/main/java/storm/trident/state/StateFactory.java
deleted file mode 100644
index a77321b..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/StateFactory.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.state;
-
-import backtype.storm.task.IMetricsContext;
-import java.io.Serializable;
-import java.util.Map;
-
-public interface StateFactory extends Serializable {
- State makeState(Map conf, IMetricsContext metrics, int partitionIndex, int numPartitions);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/StateSpec.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/StateSpec.java b/jstorm-client/src/main/java/storm/trident/state/StateSpec.java
deleted file mode 100644
index 569311e..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/StateSpec.java
+++ /dev/null
@@ -1,13 +0,0 @@
-package storm.trident.state;
-
-import java.io.Serializable;
-
-
-public class StateSpec implements Serializable {
- public StateFactory stateFactory;
- public Integer requiredNumPartitions = null;
-
- public StateSpec(StateFactory stateFactory) {
- this.stateFactory = stateFactory;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/StateType.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/StateType.java b/jstorm-client/src/main/java/storm/trident/state/StateType.java
deleted file mode 100644
index f77ec9d..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/StateType.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.state;
-
-
-public enum StateType {
- NON_TRANSACTIONAL,
- TRANSACTIONAL,
- OPAQUE
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/StateUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/StateUpdater.java b/jstorm-client/src/main/java/storm/trident/state/StateUpdater.java
deleted file mode 100644
index 7a1f19c..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/StateUpdater.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package storm.trident.state;
-
-import java.util.List;
-import storm.trident.operation.Operation;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-
-
-public interface StateUpdater<S extends State> extends Operation {
- // maybe it needs a start phase (where it can do a retrieval), an update phase, and then a finish phase...?
- // shouldn't really be a one-at-a-time interface, since we have all the tuples already?
- // TODO: used for the new values stream
- // the list is needed so that ReducerAggregator / CombinerAggregator persistentAggregate
- // on grouped streams can work efficiently
- void updateState(S state, List<TridentTuple> tuples, TridentCollector collector);
-}
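
The comments above note that updateState receives the whole batch of tuples at once, so an implementation can write them in a single pass (or one multi-put) rather than tuple by tuple. A minimal sketch under that reading, built on the BaseStateUpdater and State types from this commit; the in-memory WordListState is invented for the example:

    import java.util.ArrayList;
    import java.util.List;
    import storm.trident.operation.TridentCollector;
    import storm.trident.state.BaseStateUpdater;
    import storm.trident.state.State;
    import storm.trident.tuple.TridentTuple;

    // Hypothetical non-transactional state; it ignores commits and just accumulates words.
    class WordListState implements State {
        final List<String> words = new ArrayList<String>();
        public void beginCommit(Long txid) {}
        public void commit(Long txid) {}
    }

    class WordListUpdater extends BaseStateUpdater<WordListState> {
        @Override
        public void updateState(WordListState state, List<TridentTuple> tuples, TridentCollector collector) {
            // the whole batch arrives together, so it is written in one pass
            for (TridentTuple t : tuples) {
                state.words.add(String.valueOf(t.getValue(0)));
            }
        }
    }
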
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/TransactionalValue.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/TransactionalValue.java b/jstorm-client/src/main/java/storm/trident/state/TransactionalValue.java
deleted file mode 100644
index 933a0ea..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/TransactionalValue.java
+++ /dev/null
@@ -1,27 +0,0 @@
-package storm.trident.state;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-
-
-public class TransactionalValue<T> {
- T val;
- Long txid;
-
- public TransactionalValue(Long txid, T val) {
- this.val = val;
- this.txid = txid;
- }
-
- public T getVal() {
- return val;
- }
-
- public Long getTxid() {
- return txid;
- }
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/ValueUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/ValueUpdater.java b/jstorm-client/src/main/java/storm/trident/state/ValueUpdater.java
deleted file mode 100644
index 466a921..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/ValueUpdater.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package storm.trident.state;
-
-
-public interface ValueUpdater<T> {
- T update(T stored);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/CachedBatchReadsMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/CachedBatchReadsMap.java b/jstorm-client/src/main/java/storm/trident/state/map/CachedBatchReadsMap.java
deleted file mode 100644
index 7d8c442..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/CachedBatchReadsMap.java
+++ /dev/null
@@ -1,63 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import storm.trident.state.ValueUpdater;
-
-
-public class CachedBatchReadsMap<T> {
- public static class RetVal<T> {
- public boolean cached;
- public T val;
-
- public RetVal(T v, boolean c) {
- val = v;
- cached = c;
- }
- }
-
- Map<List<Object>, T> _cached = new HashMap<List<Object>, T>();
-
- public IBackingMap<T> _delegate;
-
- public CachedBatchReadsMap(IBackingMap<T> delegate) {
- _delegate = delegate;
- }
-
- public void reset() {
- _cached.clear();
- }
-
- public List<RetVal<T>> multiGet(List<List<Object>> keys) {
- // TODO: can optimize further by only querying backing map for keys not in the cache
- List<T> vals = _delegate.multiGet(keys);
- List<RetVal<T>> ret = new ArrayList(vals.size());
- for(int i=0; i<keys.size(); i++) {
- List<Object> key = keys.get(i);
- if(_cached.containsKey(key)) {
- ret.add(new RetVal(_cached.get(key), true));
- } else {
- ret.add(new RetVal(vals.get(i), false));
- }
- }
- return ret;
- }
-
- public void multiPut(List<List<Object>> keys, List<T> vals) {
- _delegate.multiPut(keys, vals);
- cache(keys, vals);
- }
-
- private void cache(List<List<Object>> keys, List<T> vals) {
- for(int i=0; i<keys.size(); i++) {
- List<Object> key = keys.get(i);
- T val = vals.get(i);
- _cached.put(key, val);
- }
- }
-
-
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/CachedMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/CachedMap.java b/jstorm-client/src/main/java/storm/trident/state/map/CachedMap.java
deleted file mode 100644
index f7ad646..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/CachedMap.java
+++ /dev/null
@@ -1,62 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import storm.trident.util.LRUMap;
-
-/**
- * Useful to layer over a map that communicates with a database. You generally layer an opaque map over this, and this over your database store.
- * @author nathan
- * @param <T>
- */
-public class CachedMap<T> implements IBackingMap<T> {
- LRUMap<List<Object>, T> _cache;
- IBackingMap<T> _delegate;
-
- public CachedMap(IBackingMap<T> delegate, int cacheSize) {
- _cache = new LRUMap<List<Object>, T>(cacheSize);
- _delegate = delegate;
- }
-
- @Override
- public List<T> multiGet(List<List<Object>> keys) {
- Map<List<Object>, T> results = new HashMap<List<Object>, T>();
- List<List<Object>> toGet = new ArrayList<List<Object>>();
- for(List<Object> key: keys) {
- if(_cache.containsKey(key)) {
- results.put(key, _cache.get(key));
- } else {
- toGet.add(key);
- }
- }
-
- List<T> fetchedVals = _delegate.multiGet(toGet);
- for(int i=0; i<toGet.size(); i++) {
- List<Object> key = toGet.get(i);
- T val = fetchedVals.get(i);
- _cache.put(key, val);
- results.put(key, val);
- }
-
- List<T> ret = new ArrayList<T>(keys.size());
- for(List<Object> key: keys) {
- ret.add(results.get(key));
- }
- return ret;
- }
-
- @Override
- public void multiPut(List<List<Object>> keys, List<T> values) {
- cache(keys, values);
- _delegate.multiPut(keys, values);
- }
-
- private void cache(List<List<Object>> keys, List<T> values) {
- for(int i=0; i<keys.size(); i++) {
- _cache.put(keys.get(i), values.get(i));
- }
- }
-
-}
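
To make the layering in the Javadoc concrete: CachedMap sits in front of whatever IBackingMap actually talks to the database and serves repeated reads of hot keys from its LRU cache, while writes go through to the delegate and refresh the cache. A small, illustrative usage sketch (not part of this diff); InMemoryBackingMap stands in for a real database-backed IBackingMap:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import storm.trident.state.map.CachedMap;
    import storm.trident.state.map.IBackingMap;

    // Stand-in for a map that would normally communicate with an external database.
    class InMemoryBackingMap implements IBackingMap<Long> {
        private final Map<List<Object>, Long> store = new HashMap<List<Object>, Long>();

        public List<Long> multiGet(List<List<Object>> keys) {
            List<Long> ret = new ArrayList<Long>(keys.size());
            for (List<Object> k : keys) {
                ret.add(store.get(k));
            }
            return ret;
        }

        public void multiPut(List<List<Object>> keys, List<Long> vals) {
            for (int i = 0; i < keys.size(); i++) {
                store.put(keys.get(i), vals.get(i));
            }
        }
    }

    class CachedMapUsage {
        public static void main(String[] args) {
            IBackingMap<Long> db = new InMemoryBackingMap();
            CachedMap<Long> cached = new CachedMap<Long>(db, 1000); // keep up to 1000 keys cached

            List<List<Object>> keys = new ArrayList<List<Object>>();
            keys.add(Arrays.<Object>asList("word"));

            Long current = cached.multiGet(keys).get(0);      // first read goes to the backing map
            List<Long> newVals = new ArrayList<Long>();
            newVals.add(current == null ? 1L : current + 1L);
            cached.multiPut(keys, newVals);                   // write-through; also populates the cache
        }
    }
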
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/IBackingMap.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/IBackingMap.java b/jstorm-client/src/main/java/storm/trident/state/map/IBackingMap.java
deleted file mode 100644
index c1fdc27..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/IBackingMap.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.state.map;
-
-import java.util.List;
-
-
-public interface IBackingMap<T> {
- List<T> multiGet(List<List<Object>> keys);
- void multiPut(List<List<Object>> keys, List<T> vals);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/state/map/MapCombinerAggStateUpdater.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/state/map/MapCombinerAggStateUpdater.java b/jstorm-client/src/main/java/storm/trident/state/map/MapCombinerAggStateUpdater.java
deleted file mode 100644
index 338c9ac..0000000
--- a/jstorm-client/src/main/java/storm/trident/state/map/MapCombinerAggStateUpdater.java
+++ /dev/null
@@ -1,66 +0,0 @@
-package storm.trident.state.map;
-
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentOperationContext;
-import storm.trident.state.CombinerValueUpdater;
-import storm.trident.state.StateUpdater;
-import storm.trident.state.ValueUpdater;
-import storm.trident.tuple.ComboList;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-public class MapCombinerAggStateUpdater implements StateUpdater<MapState> {
- CombinerAggregator _agg;
- Fields _groupFields;
- Fields _inputFields;
- ProjectionFactory _groupFactory;
- ProjectionFactory _inputFactory;
- ComboList.Factory _factory;
-
-
- public MapCombinerAggStateUpdater(CombinerAggregator agg, Fields groupFields, Fields inputFields) {
- _agg = agg;
- _groupFields = groupFields;
- _inputFields = inputFields;
- if(inputFields.size()!=1) {
- throw new IllegalArgumentException("Combiner aggs only take a single field as input. Got this instead: " + inputFields.toString());
- }
- _factory = new ComboList.Factory(groupFields.size(), inputFields.size());
- }
-
-
- @Override
- public void updateState(MapState map, List<TridentTuple> tuples, TridentCollector collector) {
- List<List<Object>> groups = new ArrayList<List<Object>>(tuples.size());
- List<ValueUpdater> updaters = new ArrayList<ValueUpdater>(tuples.size());
-
- for(TridentTuple t: tuples) {
- groups.add(_groupFactory.create(t));
- updaters.add(new CombinerValueUpdater(_agg,_inputFactory.create(t).getValue(0)));
- }
- List<Object> newVals = map.multiUpdate(groups, updaters);
-
- for(int i=0; i<tuples.size(); i++) {
- List<Object> key = groups.get(i);
- Object result = newVals.get(i);
- collector.emit(_factory.create(new List[] {key, new Values(result) }));
- }
- }
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- _groupFactory = context.makeProjectionFactory(_groupFields);
- _inputFactory = context.makeProjectionFactory(_inputFields);
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/SupervisorWorkers.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/SupervisorWorkers.java b/jstorm-client/src/main/java/backtype/storm/generated/SupervisorWorkers.java
deleted file mode 100644
index b8cb513..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/SupervisorWorkers.java
+++ /dev/null
@@ -1,464 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SupervisorWorkers implements org.apache.thrift7.TBase<SupervisorWorkers, SupervisorWorkers._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("SupervisorWorkers");
-
- private static final org.apache.thrift7.protocol.TField SUPERVISOR_FIELD_DESC = new org.apache.thrift7.protocol.TField("supervisor", org.apache.thrift7.protocol.TType.STRUCT, (short)1);
- private static final org.apache.thrift7.protocol.TField WORKERS_FIELD_DESC = new org.apache.thrift7.protocol.TField("workers", org.apache.thrift7.protocol.TType.LIST, (short)2);
-
- private SupervisorSummary supervisor; // required
- private List<WorkerSummary> workers; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SUPERVISOR((short)1, "supervisor"),
- WORKERS((short)2, "workers");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // SUPERVISOR
- return SUPERVISOR;
- case 2: // WORKERS
- return WORKERS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SUPERVISOR, new org.apache.thrift7.meta_data.FieldMetaData("supervisor", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, SupervisorSummary.class)));
- tmpMap.put(_Fields.WORKERS, new org.apache.thrift7.meta_data.FieldMetaData("workers", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, WorkerSummary.class))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(SupervisorWorkers.class, metaDataMap);
- }
-
- public SupervisorWorkers() {
- }
-
- public SupervisorWorkers(
- SupervisorSummary supervisor,
- List<WorkerSummary> workers)
- {
- this();
- this.supervisor = supervisor;
- this.workers = workers;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public SupervisorWorkers(SupervisorWorkers other) {
- if (other.is_set_supervisor()) {
- this.supervisor = new SupervisorSummary(other.supervisor);
- }
- if (other.is_set_workers()) {
- List<WorkerSummary> __this__workers = new ArrayList<WorkerSummary>();
- for (WorkerSummary other_element : other.workers) {
- __this__workers.add(new WorkerSummary(other_element));
- }
- this.workers = __this__workers;
- }
- }
-
- public SupervisorWorkers deepCopy() {
- return new SupervisorWorkers(this);
- }
-
- @Override
- public void clear() {
- this.supervisor = null;
- this.workers = null;
- }
-
- public SupervisorSummary get_supervisor() {
- return this.supervisor;
- }
-
- public void set_supervisor(SupervisorSummary supervisor) {
- this.supervisor = supervisor;
- }
-
- public void unset_supervisor() {
- this.supervisor = null;
- }
-
- /** Returns true if field supervisor is set (has been assigned a value) and false otherwise */
- public boolean is_set_supervisor() {
- return this.supervisor != null;
- }
-
- public void set_supervisor_isSet(boolean value) {
- if (!value) {
- this.supervisor = null;
- }
- }
-
- public int get_workers_size() {
- return (this.workers == null) ? 0 : this.workers.size();
- }
-
- public java.util.Iterator<WorkerSummary> get_workers_iterator() {
- return (this.workers == null) ? null : this.workers.iterator();
- }
-
- public void add_to_workers(WorkerSummary elem) {
- if (this.workers == null) {
- this.workers = new ArrayList<WorkerSummary>();
- }
- this.workers.add(elem);
- }
-
- public List<WorkerSummary> get_workers() {
- return this.workers;
- }
-
- public void set_workers(List<WorkerSummary> workers) {
- this.workers = workers;
- }
-
- public void unset_workers() {
- this.workers = null;
- }
-
- /** Returns true if field workers is set (has been assigned a value) and false otherwise */
- public boolean is_set_workers() {
- return this.workers != null;
- }
-
- public void set_workers_isSet(boolean value) {
- if (!value) {
- this.workers = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SUPERVISOR:
- if (value == null) {
- unset_supervisor();
- } else {
- set_supervisor((SupervisorSummary)value);
- }
- break;
-
- case WORKERS:
- if (value == null) {
- unset_workers();
- } else {
- set_workers((List<WorkerSummary>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SUPERVISOR:
- return get_supervisor();
-
- case WORKERS:
- return get_workers();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SUPERVISOR:
- return is_set_supervisor();
- case WORKERS:
- return is_set_workers();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof SupervisorWorkers)
- return this.equals((SupervisorWorkers)that);
- return false;
- }
-
- public boolean equals(SupervisorWorkers that) {
- if (that == null)
- return false;
-
- boolean this_present_supervisor = true && this.is_set_supervisor();
- boolean that_present_supervisor = true && that.is_set_supervisor();
- if (this_present_supervisor || that_present_supervisor) {
- if (!(this_present_supervisor && that_present_supervisor))
- return false;
- if (!this.supervisor.equals(that.supervisor))
- return false;
- }
-
- boolean this_present_workers = true && this.is_set_workers();
- boolean that_present_workers = true && that.is_set_workers();
- if (this_present_workers || that_present_workers) {
- if (!(this_present_workers && that_present_workers))
- return false;
- if (!this.workers.equals(that.workers))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_supervisor = true && (is_set_supervisor());
- builder.append(present_supervisor);
- if (present_supervisor)
- builder.append(supervisor);
-
- boolean present_workers = true && (is_set_workers());
- builder.append(present_workers);
- if (present_workers)
- builder.append(workers);
-
- return builder.toHashCode();
- }
-
- public int compareTo(SupervisorWorkers other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- SupervisorWorkers typedOther = (SupervisorWorkers)other;
-
- lastComparison = Boolean.valueOf(is_set_supervisor()).compareTo(typedOther.is_set_supervisor());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_supervisor()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.supervisor, typedOther.supervisor);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_workers()).compareTo(typedOther.is_set_workers());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_workers()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.workers, typedOther.workers);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // SUPERVISOR
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.supervisor = new SupervisorSummary();
- this.supervisor.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // WORKERS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list205 = iprot.readListBegin();
- this.workers = new ArrayList<WorkerSummary>(_list205.size);
- for (int _i206 = 0; _i206 < _list205.size; ++_i206)
- {
- WorkerSummary _elem207; // required
- _elem207 = new WorkerSummary();
- _elem207.read(iprot);
- this.workers.add(_elem207);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.supervisor != null) {
- oprot.writeFieldBegin(SUPERVISOR_FIELD_DESC);
- this.supervisor.write(oprot);
- oprot.writeFieldEnd();
- }
- if (this.workers != null) {
- oprot.writeFieldBegin(WORKERS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRUCT, this.workers.size()));
- for (WorkerSummary _iter208 : this.workers)
- {
- _iter208.write(oprot);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("SupervisorWorkers(");
- boolean first = true;
-
- sb.append("supervisor:");
- if (this.supervisor == null) {
- sb.append("null");
- } else {
- sb.append(this.supervisor);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("workers:");
- if (this.workers == null) {
- sb.append("null");
- } else {
- sb.append(this.workers);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_supervisor()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'supervisor' is unset! Struct:" + toString());
- }
-
- if (!is_set_workers()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'workers' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/TaskMetricData.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/TaskMetricData.java b/jstorm-client/src/main/java/backtype/storm/generated/TaskMetricData.java
deleted file mode 100644
index 8d89649..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/TaskMetricData.java
+++ /dev/null
@@ -1,1135 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class TaskMetricData implements org.apache.thrift7.TBase<TaskMetricData, TaskMetricData._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("TaskMetricData");
-
- private static final org.apache.thrift7.protocol.TField TASK_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("task_id", org.apache.thrift7.protocol.TType.I32, (short)1);
- private static final org.apache.thrift7.protocol.TField COMPONENT_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("component_id", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField GAUGE_FIELD_DESC = new org.apache.thrift7.protocol.TField("gauge", org.apache.thrift7.protocol.TType.MAP, (short)3);
- private static final org.apache.thrift7.protocol.TField COUNTER_FIELD_DESC = new org.apache.thrift7.protocol.TField("counter", org.apache.thrift7.protocol.TType.MAP, (short)4);
- private static final org.apache.thrift7.protocol.TField METER_FIELD_DESC = new org.apache.thrift7.protocol.TField("meter", org.apache.thrift7.protocol.TType.MAP, (short)5);
- private static final org.apache.thrift7.protocol.TField TIMER_FIELD_DESC = new org.apache.thrift7.protocol.TField("timer", org.apache.thrift7.protocol.TType.MAP, (short)6);
- private static final org.apache.thrift7.protocol.TField HISTOGRAM_FIELD_DESC = new org.apache.thrift7.protocol.TField("histogram", org.apache.thrift7.protocol.TType.MAP, (short)7);
-
- private int task_id; // required
- private String component_id; // required
- private Map<String,Double> gauge; // required
- private Map<String,Double> counter; // required
- private Map<String,Double> meter; // required
- private Map<String,Double> timer; // required
- private Map<String,Double> histogram; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- TASK_ID((short)1, "task_id"),
- COMPONENT_ID((short)2, "component_id"),
- GAUGE((short)3, "gauge"),
- COUNTER((short)4, "counter"),
- METER((short)5, "meter"),
- TIMER((short)6, "timer"),
- HISTOGRAM((short)7, "histogram");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // TASK_ID
- return TASK_ID;
- case 2: // COMPONENT_ID
- return COMPONENT_ID;
- case 3: // GAUGE
- return GAUGE;
- case 4: // COUNTER
- return COUNTER;
- case 5: // METER
- return METER;
- case 6: // TIMER
- return TIMER;
- case 7: // HISTOGRAM
- return HISTOGRAM;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __TASK_ID_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.TASK_ID, new org.apache.thrift7.meta_data.FieldMetaData("task_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.COMPONENT_ID, new org.apache.thrift7.meta_data.FieldMetaData("component_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.GAUGE, new org.apache.thrift7.meta_data.FieldMetaData("gauge", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.COUNTER, new org.apache.thrift7.meta_data.FieldMetaData("counter", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.METER, new org.apache.thrift7.meta_data.FieldMetaData("meter", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.TIMER, new org.apache.thrift7.meta_data.FieldMetaData("timer", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- tmpMap.put(_Fields.HISTOGRAM, new org.apache.thrift7.meta_data.FieldMetaData("histogram", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.DOUBLE))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(TaskMetricData.class, metaDataMap);
- }
-
- public TaskMetricData() {
- }
-
- public TaskMetricData(
- int task_id,
- String component_id,
- Map<String,Double> gauge,
- Map<String,Double> counter,
- Map<String,Double> meter,
- Map<String,Double> timer,
- Map<String,Double> histogram)
- {
- this();
- this.task_id = task_id;
- set_task_id_isSet(true);
- this.component_id = component_id;
- this.gauge = gauge;
- this.counter = counter;
- this.meter = meter;
- this.timer = timer;
- this.histogram = histogram;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public TaskMetricData(TaskMetricData other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- this.task_id = other.task_id;
- if (other.is_set_component_id()) {
- this.component_id = other.component_id;
- }
- if (other.is_set_gauge()) {
- Map<String,Double> __this__gauge = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.gauge.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__gauge_copy_key = other_element_key;
-
- Double __this__gauge_copy_value = other_element_value;
-
- __this__gauge.put(__this__gauge_copy_key, __this__gauge_copy_value);
- }
- this.gauge = __this__gauge;
- }
- if (other.is_set_counter()) {
- Map<String,Double> __this__counter = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.counter.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__counter_copy_key = other_element_key;
-
- Double __this__counter_copy_value = other_element_value;
-
- __this__counter.put(__this__counter_copy_key, __this__counter_copy_value);
- }
- this.counter = __this__counter;
- }
- if (other.is_set_meter()) {
- Map<String,Double> __this__meter = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.meter.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__meter_copy_key = other_element_key;
-
- Double __this__meter_copy_value = other_element_value;
-
- __this__meter.put(__this__meter_copy_key, __this__meter_copy_value);
- }
- this.meter = __this__meter;
- }
- if (other.is_set_timer()) {
- Map<String,Double> __this__timer = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.timer.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__timer_copy_key = other_element_key;
-
- Double __this__timer_copy_value = other_element_value;
-
- __this__timer.put(__this__timer_copy_key, __this__timer_copy_value);
- }
- this.timer = __this__timer;
- }
- if (other.is_set_histogram()) {
- Map<String,Double> __this__histogram = new HashMap<String,Double>();
- for (Map.Entry<String, Double> other_element : other.histogram.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Double other_element_value = other_element.getValue();
-
- String __this__histogram_copy_key = other_element_key;
-
- Double __this__histogram_copy_value = other_element_value;
-
- __this__histogram.put(__this__histogram_copy_key, __this__histogram_copy_value);
- }
- this.histogram = __this__histogram;
- }
- }
-
- public TaskMetricData deepCopy() {
- return new TaskMetricData(this);
- }
-
- @Override
- public void clear() {
- set_task_id_isSet(false);
- this.task_id = 0;
- this.component_id = null;
- this.gauge = null;
- this.counter = null;
- this.meter = null;
- this.timer = null;
- this.histogram = null;
- }
-
- public int get_task_id() {
- return this.task_id;
- }
-
- public void set_task_id(int task_id) {
- this.task_id = task_id;
- set_task_id_isSet(true);
- }
-
- public void unset_task_id() {
- __isset_bit_vector.clear(__TASK_ID_ISSET_ID);
- }
-
- /** Returns true if field task_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_task_id() {
- return __isset_bit_vector.get(__TASK_ID_ISSET_ID);
- }
-
- public void set_task_id_isSet(boolean value) {
- __isset_bit_vector.set(__TASK_ID_ISSET_ID, value);
- }
-
- public String get_component_id() {
- return this.component_id;
- }
-
- public void set_component_id(String component_id) {
- this.component_id = component_id;
- }
-
- public void unset_component_id() {
- this.component_id = null;
- }
-
- /** Returns true if field component_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_component_id() {
- return this.component_id != null;
- }
-
- public void set_component_id_isSet(boolean value) {
- if (!value) {
- this.component_id = null;
- }
- }
-
- public int get_gauge_size() {
- return (this.gauge == null) ? 0 : this.gauge.size();
- }
-
- public void put_to_gauge(String key, double val) {
- if (this.gauge == null) {
- this.gauge = new HashMap<String,Double>();
- }
- this.gauge.put(key, val);
- }
-
- public Map<String,Double> get_gauge() {
- return this.gauge;
- }
-
- public void set_gauge(Map<String,Double> gauge) {
- this.gauge = gauge;
- }
-
- public void unset_gauge() {
- this.gauge = null;
- }
-
- /** Returns true if field gauge is set (has been assigned a value) and false otherwise */
- public boolean is_set_gauge() {
- return this.gauge != null;
- }
-
- public void set_gauge_isSet(boolean value) {
- if (!value) {
- this.gauge = null;
- }
- }
-
- public int get_counter_size() {
- return (this.counter == null) ? 0 : this.counter.size();
- }
-
- public void put_to_counter(String key, double val) {
- if (this.counter == null) {
- this.counter = new HashMap<String,Double>();
- }
- this.counter.put(key, val);
- }
-
- public Map<String,Double> get_counter() {
- return this.counter;
- }
-
- public void set_counter(Map<String,Double> counter) {
- this.counter = counter;
- }
-
- public void unset_counter() {
- this.counter = null;
- }
-
- /** Returns true if field counter is set (has been assigned a value) and false otherwise */
- public boolean is_set_counter() {
- return this.counter != null;
- }
-
- public void set_counter_isSet(boolean value) {
- if (!value) {
- this.counter = null;
- }
- }
-
- public int get_meter_size() {
- return (this.meter == null) ? 0 : this.meter.size();
- }
-
- public void put_to_meter(String key, double val) {
- if (this.meter == null) {
- this.meter = new HashMap<String,Double>();
- }
- this.meter.put(key, val);
- }
-
- public Map<String,Double> get_meter() {
- return this.meter;
- }
-
- public void set_meter(Map<String,Double> meter) {
- this.meter = meter;
- }
-
- public void unset_meter() {
- this.meter = null;
- }
-
- /** Returns true if field meter is set (has been assigned a value) and false otherwise */
- public boolean is_set_meter() {
- return this.meter != null;
- }
-
- public void set_meter_isSet(boolean value) {
- if (!value) {
- this.meter = null;
- }
- }
-
- public int get_timer_size() {
- return (this.timer == null) ? 0 : this.timer.size();
- }
-
- public void put_to_timer(String key, double val) {
- if (this.timer == null) {
- this.timer = new HashMap<String,Double>();
- }
- this.timer.put(key, val);
- }
-
- public Map<String,Double> get_timer() {
- return this.timer;
- }
-
- public void set_timer(Map<String,Double> timer) {
- this.timer = timer;
- }
-
- public void unset_timer() {
- this.timer = null;
- }
-
- /** Returns true if field timer is set (has been assigned a value) and false otherwise */
- public boolean is_set_timer() {
- return this.timer != null;
- }
-
- public void set_timer_isSet(boolean value) {
- if (!value) {
- this.timer = null;
- }
- }
-
- public int get_histogram_size() {
- return (this.histogram == null) ? 0 : this.histogram.size();
- }
-
- public void put_to_histogram(String key, double val) {
- if (this.histogram == null) {
- this.histogram = new HashMap<String,Double>();
- }
- this.histogram.put(key, val);
- }
-
- public Map<String,Double> get_histogram() {
- return this.histogram;
- }
-
- public void set_histogram(Map<String,Double> histogram) {
- this.histogram = histogram;
- }
-
- public void unset_histogram() {
- this.histogram = null;
- }
-
- /** Returns true if field histogram is set (has been assigned a value) and false otherwise */
- public boolean is_set_histogram() {
- return this.histogram != null;
- }
-
- public void set_histogram_isSet(boolean value) {
- if (!value) {
- this.histogram = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case TASK_ID:
- if (value == null) {
- unset_task_id();
- } else {
- set_task_id((Integer)value);
- }
- break;
-
- case COMPONENT_ID:
- if (value == null) {
- unset_component_id();
- } else {
- set_component_id((String)value);
- }
- break;
-
- case GAUGE:
- if (value == null) {
- unset_gauge();
- } else {
- set_gauge((Map<String,Double>)value);
- }
- break;
-
- case COUNTER:
- if (value == null) {
- unset_counter();
- } else {
- set_counter((Map<String,Double>)value);
- }
- break;
-
- case METER:
- if (value == null) {
- unset_meter();
- } else {
- set_meter((Map<String,Double>)value);
- }
- break;
-
- case TIMER:
- if (value == null) {
- unset_timer();
- } else {
- set_timer((Map<String,Double>)value);
- }
- break;
-
- case HISTOGRAM:
- if (value == null) {
- unset_histogram();
- } else {
- set_histogram((Map<String,Double>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case TASK_ID:
- return Integer.valueOf(get_task_id());
-
- case COMPONENT_ID:
- return get_component_id();
-
- case GAUGE:
- return get_gauge();
-
- case COUNTER:
- return get_counter();
-
- case METER:
- return get_meter();
-
- case TIMER:
- return get_timer();
-
- case HISTOGRAM:
- return get_histogram();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case TASK_ID:
- return is_set_task_id();
- case COMPONENT_ID:
- return is_set_component_id();
- case GAUGE:
- return is_set_gauge();
- case COUNTER:
- return is_set_counter();
- case METER:
- return is_set_meter();
- case TIMER:
- return is_set_timer();
- case HISTOGRAM:
- return is_set_histogram();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof TaskMetricData)
- return this.equals((TaskMetricData)that);
- return false;
- }
-
- public boolean equals(TaskMetricData that) {
- if (that == null)
- return false;
-
- boolean this_present_task_id = true;
- boolean that_present_task_id = true;
- if (this_present_task_id || that_present_task_id) {
- if (!(this_present_task_id && that_present_task_id))
- return false;
- if (this.task_id != that.task_id)
- return false;
- }
-
- boolean this_present_component_id = true && this.is_set_component_id();
- boolean that_present_component_id = true && that.is_set_component_id();
- if (this_present_component_id || that_present_component_id) {
- if (!(this_present_component_id && that_present_component_id))
- return false;
- if (!this.component_id.equals(that.component_id))
- return false;
- }
-
- boolean this_present_gauge = true && this.is_set_gauge();
- boolean that_present_gauge = true && that.is_set_gauge();
- if (this_present_gauge || that_present_gauge) {
- if (!(this_present_gauge && that_present_gauge))
- return false;
- if (!this.gauge.equals(that.gauge))
- return false;
- }
-
- boolean this_present_counter = true && this.is_set_counter();
- boolean that_present_counter = true && that.is_set_counter();
- if (this_present_counter || that_present_counter) {
- if (!(this_present_counter && that_present_counter))
- return false;
- if (!this.counter.equals(that.counter))
- return false;
- }
-
- boolean this_present_meter = true && this.is_set_meter();
- boolean that_present_meter = true && that.is_set_meter();
- if (this_present_meter || that_present_meter) {
- if (!(this_present_meter && that_present_meter))
- return false;
- if (!this.meter.equals(that.meter))
- return false;
- }
-
- boolean this_present_timer = true && this.is_set_timer();
- boolean that_present_timer = true && that.is_set_timer();
- if (this_present_timer || that_present_timer) {
- if (!(this_present_timer && that_present_timer))
- return false;
- if (!this.timer.equals(that.timer))
- return false;
- }
-
- boolean this_present_histogram = true && this.is_set_histogram();
- boolean that_present_histogram = true && that.is_set_histogram();
- if (this_present_histogram || that_present_histogram) {
- if (!(this_present_histogram && that_present_histogram))
- return false;
- if (!this.histogram.equals(that.histogram))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_task_id = true;
- builder.append(present_task_id);
- if (present_task_id)
- builder.append(task_id);
-
- boolean present_component_id = true && (is_set_component_id());
- builder.append(present_component_id);
- if (present_component_id)
- builder.append(component_id);
-
- boolean present_gauge = true && (is_set_gauge());
- builder.append(present_gauge);
- if (present_gauge)
- builder.append(gauge);
-
- boolean present_counter = true && (is_set_counter());
- builder.append(present_counter);
- if (present_counter)
- builder.append(counter);
-
- boolean present_meter = true && (is_set_meter());
- builder.append(present_meter);
- if (present_meter)
- builder.append(meter);
-
- boolean present_timer = true && (is_set_timer());
- builder.append(present_timer);
- if (present_timer)
- builder.append(timer);
-
- boolean present_histogram = true && (is_set_histogram());
- builder.append(present_histogram);
- if (present_histogram)
- builder.append(histogram);
-
- return builder.toHashCode();
- }
-
- public int compareTo(TaskMetricData other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- TaskMetricData typedOther = (TaskMetricData)other;
-
- lastComparison = Boolean.valueOf(is_set_task_id()).compareTo(typedOther.is_set_task_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_task_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.task_id, typedOther.task_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_component_id()).compareTo(typedOther.is_set_component_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_component_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.component_id, typedOther.component_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_gauge()).compareTo(typedOther.is_set_gauge());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_gauge()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.gauge, typedOther.gauge);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_counter()).compareTo(typedOther.is_set_counter());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_counter()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.counter, typedOther.counter);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_meter()).compareTo(typedOther.is_set_meter());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_meter()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.meter, typedOther.meter);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_timer()).compareTo(typedOther.is_set_timer());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_timer()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.timer, typedOther.timer);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_histogram()).compareTo(typedOther.is_set_histogram());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_histogram()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.histogram, typedOther.histogram);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // TASK_ID
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.task_id = iprot.readI32();
- set_task_id_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // COMPONENT_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.component_id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // GAUGE
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map209 = iprot.readMapBegin();
- this.gauge = new HashMap<String,Double>(2*_map209.size);
- for (int _i210 = 0; _i210 < _map209.size; ++_i210)
- {
- String _key211; // required
- double _val212; // required
- _key211 = iprot.readString();
- _val212 = iprot.readDouble();
- this.gauge.put(_key211, _val212);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // COUNTER
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map213 = iprot.readMapBegin();
- this.counter = new HashMap<String,Double>(2*_map213.size);
- for (int _i214 = 0; _i214 < _map213.size; ++_i214)
- {
- String _key215; // required
- double _val216; // required
- _key215 = iprot.readString();
- _val216 = iprot.readDouble();
- this.counter.put(_key215, _val216);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // METER
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map217 = iprot.readMapBegin();
- this.meter = new HashMap<String,Double>(2*_map217.size);
- for (int _i218 = 0; _i218 < _map217.size; ++_i218)
- {
- String _key219; // required
- double _val220; // required
- _key219 = iprot.readString();
- _val220 = iprot.readDouble();
- this.meter.put(_key219, _val220);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 6: // TIMER
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map221 = iprot.readMapBegin();
- this.timer = new HashMap<String,Double>(2*_map221.size);
- for (int _i222 = 0; _i222 < _map221.size; ++_i222)
- {
- String _key223; // required
- double _val224; // required
- _key223 = iprot.readString();
- _val224 = iprot.readDouble();
- this.timer.put(_key223, _val224);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 7: // HISTOGRAM
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map225 = iprot.readMapBegin();
- this.histogram = new HashMap<String,Double>(2*_map225.size);
- for (int _i226 = 0; _i226 < _map225.size; ++_i226)
- {
- String _key227; // required
- double _val228; // required
- _key227 = iprot.readString();
- _val228 = iprot.readDouble();
- this.histogram.put(_key227, _val228);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- oprot.writeFieldBegin(TASK_ID_FIELD_DESC);
- oprot.writeI32(this.task_id);
- oprot.writeFieldEnd();
- if (this.component_id != null) {
- oprot.writeFieldBegin(COMPONENT_ID_FIELD_DESC);
- oprot.writeString(this.component_id);
- oprot.writeFieldEnd();
- }
- if (this.gauge != null) {
- oprot.writeFieldBegin(GAUGE_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.gauge.size()));
- for (Map.Entry<String, Double> _iter229 : this.gauge.entrySet())
- {
- oprot.writeString(_iter229.getKey());
- oprot.writeDouble(_iter229.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.counter != null) {
- oprot.writeFieldBegin(COUNTER_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.counter.size()));
- for (Map.Entry<String, Double> _iter230 : this.counter.entrySet())
- {
- oprot.writeString(_iter230.getKey());
- oprot.writeDouble(_iter230.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.meter != null) {
- oprot.writeFieldBegin(METER_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.meter.size()));
- for (Map.Entry<String, Double> _iter231 : this.meter.entrySet())
- {
- oprot.writeString(_iter231.getKey());
- oprot.writeDouble(_iter231.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.timer != null) {
- oprot.writeFieldBegin(TIMER_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.timer.size()));
- for (Map.Entry<String, Double> _iter232 : this.timer.entrySet())
- {
- oprot.writeString(_iter232.getKey());
- oprot.writeDouble(_iter232.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.histogram != null) {
- oprot.writeFieldBegin(HISTOGRAM_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.DOUBLE, this.histogram.size()));
- for (Map.Entry<String, Double> _iter233 : this.histogram.entrySet())
- {
- oprot.writeString(_iter233.getKey());
- oprot.writeDouble(_iter233.getValue());
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("TaskMetricData(");
- boolean first = true;
-
- sb.append("task_id:");
- sb.append(this.task_id);
- first = false;
- if (!first) sb.append(", ");
- sb.append("component_id:");
- if (this.component_id == null) {
- sb.append("null");
- } else {
- sb.append(this.component_id);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("gauge:");
- if (this.gauge == null) {
- sb.append("null");
- } else {
- sb.append(this.gauge);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("counter:");
- if (this.counter == null) {
- sb.append("null");
- } else {
- sb.append(this.counter);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("meter:");
- if (this.meter == null) {
- sb.append("null");
- } else {
- sb.append(this.meter);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("timer:");
- if (this.timer == null) {
- sb.append("null");
- } else {
- sb.append(this.timer);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("histogram:");
- if (this.histogram == null) {
- sb.append("null");
- } else {
- sb.append(this.histogram);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_task_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'task_id' is unset! Struct:" + toString());
- }
-
- if (!is_set_component_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'component_id' is unset! Struct:" + toString());
- }
-
- if (!is_set_gauge()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'gauge' is unset! Struct:" + toString());
- }
-
- if (!is_set_counter()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'counter' is unset! Struct:" + toString());
- }
-
- if (!is_set_meter()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'meter' is unset! Struct:" + toString());
- }
-
- if (!is_set_timer()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'timer' is unset! Struct:" + toString());
- }
-
- if (!is_set_histogram()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'histogram' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
[13/60] [abbrv] [partial] storm git commit: Release 2.0.4-SNAPSHOT
Posted by pt...@apache.org.
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/py/storm/ttypes.py
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/py/storm/ttypes.py b/jstorm-client/src/main/py/storm/ttypes.py
deleted file mode 100644
index 0e9ba3e..0000000
--- a/jstorm-client/src/main/py/storm/ttypes.py
+++ /dev/null
@@ -1,4254 +0,0 @@
-#
-# Autogenerated by Thrift Compiler (0.7.0)
-#
-# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
-#
-
-from thrift.Thrift import *
-
-from thrift.transport import TTransport
-from thrift.protocol import TBinaryProtocol, TProtocol
-try:
- from thrift.protocol import fastbinary
-except:
- fastbinary = None
-
-
-class TopologyInitialStatus:
- ACTIVE = 1
- INACTIVE = 2
-
- _VALUES_TO_NAMES = {
- 1: "ACTIVE",
- 2: "INACTIVE",
- }
-
- _NAMES_TO_VALUES = {
- "ACTIVE": 1,
- "INACTIVE": 2,
- }
-
-
-class JavaObjectArg:
- """
- Attributes:
- - int_arg
- - long_arg
- - string_arg
- - bool_arg
- - binary_arg
- - double_arg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.I32, 'int_arg', None, None, ), # 1
- (2, TType.I64, 'long_arg', None, None, ), # 2
- (3, TType.STRING, 'string_arg', None, None, ), # 3
- (4, TType.BOOL, 'bool_arg', None, None, ), # 4
- (5, TType.STRING, 'binary_arg', None, None, ), # 5
- (6, TType.DOUBLE, 'double_arg', None, None, ), # 6
- )
-
- def __hash__(self):
- return 0 + hash(self.int_arg) + hash(self.long_arg) + hash(self.string_arg) + hash(self.bool_arg) + hash(self.binary_arg) + hash(self.double_arg)
-
- def __init__(self, int_arg=None, long_arg=None, string_arg=None, bool_arg=None, binary_arg=None, double_arg=None,):
- self.int_arg = int_arg
- self.long_arg = long_arg
- self.string_arg = string_arg
- self.bool_arg = bool_arg
- self.binary_arg = binary_arg
- self.double_arg = double_arg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.I32:
- self.int_arg = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.I64:
- self.long_arg = iprot.readI64();
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRING:
- self.string_arg = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.BOOL:
- self.bool_arg = iprot.readBool();
- else:
- iprot.skip(ftype)
- elif fid == 5:
- if ftype == TType.STRING:
- self.binary_arg = iprot.readString();
- else:
- iprot.skip(ftype)
- elif fid == 6:
- if ftype == TType.DOUBLE:
- self.double_arg = iprot.readDouble();
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('JavaObjectArg')
- if self.int_arg is not None:
- oprot.writeFieldBegin('int_arg', TType.I32, 1)
- oprot.writeI32(self.int_arg)
- oprot.writeFieldEnd()
- if self.long_arg is not None:
- oprot.writeFieldBegin('long_arg', TType.I64, 2)
- oprot.writeI64(self.long_arg)
- oprot.writeFieldEnd()
- if self.string_arg is not None:
- oprot.writeFieldBegin('string_arg', TType.STRING, 3)
- oprot.writeString(self.string_arg.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.bool_arg is not None:
- oprot.writeFieldBegin('bool_arg', TType.BOOL, 4)
- oprot.writeBool(self.bool_arg)
- oprot.writeFieldEnd()
- if self.binary_arg is not None:
- oprot.writeFieldBegin('binary_arg', TType.STRING, 5)
- oprot.writeString(self.binary_arg)
- oprot.writeFieldEnd()
- if self.double_arg is not None:
- oprot.writeFieldBegin('double_arg', TType.DOUBLE, 6)
- oprot.writeDouble(self.double_arg)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class JavaObject:
- """
- Attributes:
- - full_class_name
- - args_list
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'full_class_name', None, None, ), # 1
- (2, TType.LIST, 'args_list', (TType.STRUCT,(JavaObjectArg, JavaObjectArg.thrift_spec)), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.full_class_name) + hash(self.args_list)
-
- def __init__(self, full_class_name=None, args_list=None,):
- self.full_class_name = full_class_name
- self.args_list = args_list
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.full_class_name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.LIST:
- self.args_list = []
- (_etype3, _size0) = iprot.readListBegin()
- for _i4 in xrange(_size0):
- _elem5 = JavaObjectArg()
- _elem5.read(iprot)
- self.args_list.append(_elem5)
- iprot.readListEnd()
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('JavaObject')
- if self.full_class_name is not None:
- oprot.writeFieldBegin('full_class_name', TType.STRING, 1)
- oprot.writeString(self.full_class_name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.args_list is not None:
- oprot.writeFieldBegin('args_list', TType.LIST, 2)
- oprot.writeListBegin(TType.STRUCT, len(self.args_list))
- for iter6 in self.args_list:
- iter6.write(oprot)
- oprot.writeListEnd()
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.full_class_name is None:
- raise TProtocol.TProtocolException(message='Required field full_class_name is unset!')
- if self.args_list is None:
- raise TProtocol.TProtocolException(message='Required field args_list is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class NullStruct:
-
- thrift_spec = (
- )
-
- def __hash__(self):
- return 0
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('NullStruct')
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class GlobalStreamId:
- """
- Attributes:
- - componentId
- - streamId
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'componentId', None, None, ), # 1
- (2, TType.STRING, 'streamId', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.componentId) + hash(self.streamId)
-
- def __init__(self, componentId=None, streamId=None,):
- self.componentId = componentId
- self.streamId = streamId
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.componentId = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.streamId = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('GlobalStreamId')
- if self.componentId is not None:
- oprot.writeFieldBegin('componentId', TType.STRING, 1)
- oprot.writeString(self.componentId.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.streamId is not None:
- oprot.writeFieldBegin('streamId', TType.STRING, 2)
- oprot.writeString(self.streamId.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.componentId is None:
- raise TProtocol.TProtocolException(message='Required field componentId is unset!')
- if self.streamId is None:
- raise TProtocol.TProtocolException(message='Required field streamId is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class Grouping:
- """
- Attributes:
- - fields
- - shuffle
- - all
- - none
- - direct
- - custom_object
- - custom_serialized
- - local_or_shuffle
- - localFirst
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.LIST, 'fields', (TType.STRING,None), None, ), # 1
- (2, TType.STRUCT, 'shuffle', (NullStruct, NullStruct.thrift_spec), None, ), # 2
- (3, TType.STRUCT, 'all', (NullStruct, NullStruct.thrift_spec), None, ), # 3
- (4, TType.STRUCT, 'none', (NullStruct, NullStruct.thrift_spec), None, ), # 4
- (5, TType.STRUCT, 'direct', (NullStruct, NullStruct.thrift_spec), None, ), # 5
- (6, TType.STRUCT, 'custom_object', (JavaObject, JavaObject.thrift_spec), None, ), # 6
- (7, TType.STRING, 'custom_serialized', None, None, ), # 7
- (8, TType.STRUCT, 'local_or_shuffle', (NullStruct, NullStruct.thrift_spec), None, ), # 8
- (9, TType.STRUCT, 'localFirst', (NullStruct, NullStruct.thrift_spec), None, ), # 9
- )
-
- def __hash__(self):
- return 0 + hash(self.fields) + hash(self.shuffle) + hash(self.all) + hash(self.none) + hash(self.direct) + hash(self.custom_object) + hash(self.custom_serialized) + hash(self.local_or_shuffle) + hash(self.localFirst)
-
- def __init__(self, fields=None, shuffle=None, all=None, none=None, direct=None, custom_object=None, custom_serialized=None, local_or_shuffle=None, localFirst=None,):
- self.fields = fields
- self.shuffle = shuffle
- self.all = all
- self.none = none
- self.direct = direct
- self.custom_object = custom_object
- self.custom_serialized = custom_serialized
- self.local_or_shuffle = local_or_shuffle
- self.localFirst = localFirst
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.LIST:
- self.fields = []
- (_etype10, _size7) = iprot.readListBegin()
- for _i11 in xrange(_size7):
- _elem12 = iprot.readString().decode('utf-8')
- self.fields.append(_elem12)
- iprot.readListEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.shuffle = NullStruct()
- self.shuffle.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRUCT:
- self.all = NullStruct()
- self.all.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.STRUCT:
- self.none = NullStruct()
- self.none.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 5:
- if ftype == TType.STRUCT:
- self.direct = NullStruct()
- self.direct.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 6:
- if ftype == TType.STRUCT:
- self.custom_object = JavaObject()
- self.custom_object.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 7:
- if ftype == TType.STRING:
- self.custom_serialized = iprot.readString();
- else:
- iprot.skip(ftype)
- elif fid == 8:
- if ftype == TType.STRUCT:
- self.local_or_shuffle = NullStruct()
- self.local_or_shuffle.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 9:
- if ftype == TType.STRUCT:
- self.localFirst = NullStruct()
- self.localFirst.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('Grouping')
- if self.fields is not None:
- oprot.writeFieldBegin('fields', TType.LIST, 1)
- oprot.writeListBegin(TType.STRING, len(self.fields))
- for iter13 in self.fields:
- oprot.writeString(iter13.encode('utf-8'))
- oprot.writeListEnd()
- oprot.writeFieldEnd()
- if self.shuffle is not None:
- oprot.writeFieldBegin('shuffle', TType.STRUCT, 2)
- self.shuffle.write(oprot)
- oprot.writeFieldEnd()
- if self.all is not None:
- oprot.writeFieldBegin('all', TType.STRUCT, 3)
- self.all.write(oprot)
- oprot.writeFieldEnd()
- if self.none is not None:
- oprot.writeFieldBegin('none', TType.STRUCT, 4)
- self.none.write(oprot)
- oprot.writeFieldEnd()
- if self.direct is not None:
- oprot.writeFieldBegin('direct', TType.STRUCT, 5)
- self.direct.write(oprot)
- oprot.writeFieldEnd()
- if self.custom_object is not None:
- oprot.writeFieldBegin('custom_object', TType.STRUCT, 6)
- self.custom_object.write(oprot)
- oprot.writeFieldEnd()
- if self.custom_serialized is not None:
- oprot.writeFieldBegin('custom_serialized', TType.STRING, 7)
- oprot.writeString(self.custom_serialized)
- oprot.writeFieldEnd()
- if self.local_or_shuffle is not None:
- oprot.writeFieldBegin('local_or_shuffle', TType.STRUCT, 8)
- self.local_or_shuffle.write(oprot)
- oprot.writeFieldEnd()
- if self.localFirst is not None:
- oprot.writeFieldBegin('localFirst', TType.STRUCT, 9)
- self.localFirst.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class StreamInfo:
- """
- Attributes:
- - output_fields
- - direct
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.LIST, 'output_fields', (TType.STRING,None), None, ), # 1
- (2, TType.BOOL, 'direct', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.output_fields) + hash(self.direct)
-
- def __init__(self, output_fields=None, direct=None,):
- self.output_fields = output_fields
- self.direct = direct
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.LIST:
- self.output_fields = []
- (_etype17, _size14) = iprot.readListBegin()
- for _i18 in xrange(_size14):
- _elem19 = iprot.readString().decode('utf-8')
- self.output_fields.append(_elem19)
- iprot.readListEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.BOOL:
- self.direct = iprot.readBool();
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('StreamInfo')
- if self.output_fields is not None:
- oprot.writeFieldBegin('output_fields', TType.LIST, 1)
- oprot.writeListBegin(TType.STRING, len(self.output_fields))
- for iter20 in self.output_fields:
- oprot.writeString(iter20.encode('utf-8'))
- oprot.writeListEnd()
- oprot.writeFieldEnd()
- if self.direct is not None:
- oprot.writeFieldBegin('direct', TType.BOOL, 2)
- oprot.writeBool(self.direct)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.output_fields is None:
- raise TProtocol.TProtocolException(message='Required field output_fields is unset!')
- if self.direct is None:
- raise TProtocol.TProtocolException(message='Required field direct is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class ShellComponent:
- """
- Attributes:
- - execution_command
- - script
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'execution_command', None, None, ), # 1
- (2, TType.STRING, 'script', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.execution_command) + hash(self.script)
-
- def __init__(self, execution_command=None, script=None,):
- self.execution_command = execution_command
- self.script = script
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.execution_command = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.script = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('ShellComponent')
- if self.execution_command is not None:
- oprot.writeFieldBegin('execution_command', TType.STRING, 1)
- oprot.writeString(self.execution_command.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.script is not None:
- oprot.writeFieldBegin('script', TType.STRING, 2)
- oprot.writeString(self.script.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class ComponentObject:
- """
- Attributes:
- - serialized_java
- - shell
- - java_object
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'serialized_java', None, None, ), # 1
- (2, TType.STRUCT, 'shell', (ShellComponent, ShellComponent.thrift_spec), None, ), # 2
- (3, TType.STRUCT, 'java_object', (JavaObject, JavaObject.thrift_spec), None, ), # 3
- )
-
- def __hash__(self):
- return 0 + hash(self.serialized_java) + hash(self.shell) + hash(self.java_object)
-
- def __init__(self, serialized_java=None, shell=None, java_object=None,):
- self.serialized_java = serialized_java
- self.shell = shell
- self.java_object = java_object
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.serialized_java = iprot.readString();
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.shell = ShellComponent()
- self.shell.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRUCT:
- self.java_object = JavaObject()
- self.java_object.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('ComponentObject')
- if self.serialized_java is not None:
- oprot.writeFieldBegin('serialized_java', TType.STRING, 1)
- oprot.writeString(self.serialized_java)
- oprot.writeFieldEnd()
- if self.shell is not None:
- oprot.writeFieldBegin('shell', TType.STRUCT, 2)
- self.shell.write(oprot)
- oprot.writeFieldEnd()
- if self.java_object is not None:
- oprot.writeFieldBegin('java_object', TType.STRUCT, 3)
- self.java_object.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class ComponentCommon:
- """
- Attributes:
- - inputs
- - streams
- - parallelism_hint
- - json_conf
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.MAP, 'inputs', (TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.STRUCT,(Grouping, Grouping.thrift_spec)), None, ), # 1
- (2, TType.MAP, 'streams', (TType.STRING,None,TType.STRUCT,(StreamInfo, StreamInfo.thrift_spec)), None, ), # 2
- (3, TType.I32, 'parallelism_hint', None, None, ), # 3
- (4, TType.STRING, 'json_conf', None, None, ), # 4
- )
-
- def __hash__(self):
- return 0 + hash(self.inputs) + hash(self.streams) + hash(self.parallelism_hint) + hash(self.json_conf)
-
- def __init__(self, inputs=None, streams=None, parallelism_hint=None, json_conf=None,):
- self.inputs = inputs
- self.streams = streams
- self.parallelism_hint = parallelism_hint
- self.json_conf = json_conf
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.MAP:
- self.inputs = {}
- (_ktype22, _vtype23, _size21 ) = iprot.readMapBegin()
- for _i25 in xrange(_size21):
- _key26 = GlobalStreamId()
- _key26.read(iprot)
- _val27 = Grouping()
- _val27.read(iprot)
- self.inputs[_key26] = _val27
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.MAP:
- self.streams = {}
- (_ktype29, _vtype30, _size28 ) = iprot.readMapBegin()
- for _i32 in xrange(_size28):
- _key33 = iprot.readString().decode('utf-8')
- _val34 = StreamInfo()
- _val34.read(iprot)
- self.streams[_key33] = _val34
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.I32:
- self.parallelism_hint = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.STRING:
- self.json_conf = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('ComponentCommon')
- if self.inputs is not None:
- oprot.writeFieldBegin('inputs', TType.MAP, 1)
- oprot.writeMapBegin(TType.STRUCT, TType.STRUCT, len(self.inputs))
- for kiter35,viter36 in self.inputs.items():
- kiter35.write(oprot)
- viter36.write(oprot)
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.streams is not None:
- oprot.writeFieldBegin('streams', TType.MAP, 2)
- oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.streams))
- for kiter37,viter38 in self.streams.items():
- oprot.writeString(kiter37.encode('utf-8'))
- viter38.write(oprot)
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.parallelism_hint is not None:
- oprot.writeFieldBegin('parallelism_hint', TType.I32, 3)
- oprot.writeI32(self.parallelism_hint)
- oprot.writeFieldEnd()
- if self.json_conf is not None:
- oprot.writeFieldBegin('json_conf', TType.STRING, 4)
- oprot.writeString(self.json_conf.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.inputs is None:
- raise TProtocol.TProtocolException(message='Required field inputs is unset!')
- if self.streams is None:
- raise TProtocol.TProtocolException(message='Required field streams is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class SpoutSpec:
- """
- Attributes:
- - spout_object
- - common
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'spout_object', (ComponentObject, ComponentObject.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'common', (ComponentCommon, ComponentCommon.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.spout_object) + hash(self.common)
-
- def __init__(self, spout_object=None, common=None,):
- self.spout_object = spout_object
- self.common = common
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.spout_object = ComponentObject()
- self.spout_object.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.common = ComponentCommon()
- self.common.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('SpoutSpec')
- if self.spout_object is not None:
- oprot.writeFieldBegin('spout_object', TType.STRUCT, 1)
- self.spout_object.write(oprot)
- oprot.writeFieldEnd()
- if self.common is not None:
- oprot.writeFieldBegin('common', TType.STRUCT, 2)
- self.common.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.spout_object is None:
- raise TProtocol.TProtocolException(message='Required field spout_object is unset!')
- if self.common is None:
- raise TProtocol.TProtocolException(message='Required field common is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class Bolt:
- """
- Attributes:
- - bolt_object
- - common
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'bolt_object', (ComponentObject, ComponentObject.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'common', (ComponentCommon, ComponentCommon.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.bolt_object) + hash(self.common)
-
- def __init__(self, bolt_object=None, common=None,):
- self.bolt_object = bolt_object
- self.common = common
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.bolt_object = ComponentObject()
- self.bolt_object.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.common = ComponentCommon()
- self.common.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('Bolt')
- if self.bolt_object is not None:
- oprot.writeFieldBegin('bolt_object', TType.STRUCT, 1)
- self.bolt_object.write(oprot)
- oprot.writeFieldEnd()
- if self.common is not None:
- oprot.writeFieldBegin('common', TType.STRUCT, 2)
- self.common.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.bolt_object is None:
- raise TProtocol.TProtocolException(message='Required field bolt_object is unset!')
- if self.common is None:
- raise TProtocol.TProtocolException(message='Required field common is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class StateSpoutSpec:
- """
- Attributes:
- - state_spout_object
- - common
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'state_spout_object', (ComponentObject, ComponentObject.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'common', (ComponentCommon, ComponentCommon.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.state_spout_object) + hash(self.common)
-
- def __init__(self, state_spout_object=None, common=None,):
- self.state_spout_object = state_spout_object
- self.common = common
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.state_spout_object = ComponentObject()
- self.state_spout_object.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.common = ComponentCommon()
- self.common.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('StateSpoutSpec')
- if self.state_spout_object is not None:
- oprot.writeFieldBegin('state_spout_object', TType.STRUCT, 1)
- self.state_spout_object.write(oprot)
- oprot.writeFieldEnd()
- if self.common is not None:
- oprot.writeFieldBegin('common', TType.STRUCT, 2)
- self.common.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.state_spout_object is None:
- raise TProtocol.TProtocolException(message='Required field state_spout_object is unset!')
- if self.common is None:
- raise TProtocol.TProtocolException(message='Required field common is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class StormTopology:
- """
- Attributes:
- - spouts
- - bolts
- - state_spouts
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.MAP, 'spouts', (TType.STRING,None,TType.STRUCT,(SpoutSpec, SpoutSpec.thrift_spec)), None, ), # 1
- (2, TType.MAP, 'bolts', (TType.STRING,None,TType.STRUCT,(Bolt, Bolt.thrift_spec)), None, ), # 2
- (3, TType.MAP, 'state_spouts', (TType.STRING,None,TType.STRUCT,(StateSpoutSpec, StateSpoutSpec.thrift_spec)), None, ), # 3
- )
-
- def __hash__(self):
- return 0 + hash(self.spouts) + hash(self.bolts) + hash(self.state_spouts)
-
- def __init__(self, spouts=None, bolts=None, state_spouts=None,):
- self.spouts = spouts
- self.bolts = bolts
- self.state_spouts = state_spouts
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.MAP:
- self.spouts = {}
- (_ktype40, _vtype41, _size39 ) = iprot.readMapBegin()
- for _i43 in xrange(_size39):
- _key44 = iprot.readString().decode('utf-8')
- _val45 = SpoutSpec()
- _val45.read(iprot)
- self.spouts[_key44] = _val45
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.MAP:
- self.bolts = {}
- (_ktype47, _vtype48, _size46 ) = iprot.readMapBegin()
- for _i50 in xrange(_size46):
- _key51 = iprot.readString().decode('utf-8')
- _val52 = Bolt()
- _val52.read(iprot)
- self.bolts[_key51] = _val52
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.MAP:
- self.state_spouts = {}
- (_ktype54, _vtype55, _size53 ) = iprot.readMapBegin()
- for _i57 in xrange(_size53):
- _key58 = iprot.readString().decode('utf-8')
- _val59 = StateSpoutSpec()
- _val59.read(iprot)
- self.state_spouts[_key58] = _val59
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('StormTopology')
- if self.spouts is not None:
- oprot.writeFieldBegin('spouts', TType.MAP, 1)
- oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.spouts))
- for kiter60,viter61 in self.spouts.items():
- oprot.writeString(kiter60.encode('utf-8'))
- viter61.write(oprot)
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.bolts is not None:
- oprot.writeFieldBegin('bolts', TType.MAP, 2)
- oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.bolts))
- for kiter62,viter63 in self.bolts.items():
- oprot.writeString(kiter62.encode('utf-8'))
- viter63.write(oprot)
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.state_spouts is not None:
- oprot.writeFieldBegin('state_spouts', TType.MAP, 3)
- oprot.writeMapBegin(TType.STRING, TType.STRUCT, len(self.state_spouts))
- for kiter64,viter65 in self.state_spouts.items():
- oprot.writeString(kiter64.encode('utf-8'))
- viter65.write(oprot)
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.spouts is None:
- raise TProtocol.TProtocolException(message='Required field spouts is unset!')
- if self.bolts is None:
- raise TProtocol.TProtocolException(message='Required field bolts is unset!')
- if self.state_spouts is None:
- raise TProtocol.TProtocolException(message='Required field state_spouts is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class TopologyAssignException(Exception):
- """
- Attributes:
- - msg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'msg', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.msg)
-
- def __init__(self, msg=None,):
- self.msg = msg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.msg = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('TopologyAssignException')
- if self.msg is not None:
- oprot.writeFieldBegin('msg', TType.STRING, 1)
- oprot.writeString(self.msg.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.msg is None:
- raise TProtocol.TProtocolException(message='Required field msg is unset!')
- return
-
-
- def __str__(self):
- return repr(self)
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class AlreadyAliveException(Exception):
- """
- Attributes:
- - msg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'msg', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.msg)
-
- def __init__(self, msg=None,):
- self.msg = msg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.msg = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('AlreadyAliveException')
- if self.msg is not None:
- oprot.writeFieldBegin('msg', TType.STRING, 1)
- oprot.writeString(self.msg.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.msg is None:
- raise TProtocol.TProtocolException(message='Required field msg is unset!')
- return
-
-
- def __str__(self):
- return repr(self)
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class NotAliveException(Exception):
- """
- Attributes:
- - msg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'msg', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.msg)
-
- def __init__(self, msg=None,):
- self.msg = msg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.msg = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('NotAliveException')
- if self.msg is not None:
- oprot.writeFieldBegin('msg', TType.STRING, 1)
- oprot.writeString(self.msg.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.msg is None:
- raise TProtocol.TProtocolException(message='Required field msg is unset!')
- return
-
-
- def __str__(self):
- return repr(self)
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class InvalidTopologyException(Exception):
- """
- Attributes:
- - msg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'msg', None, None, ), # 1
- )
-
- def __hash__(self):
- return 0 + hash(self.msg)
-
- def __init__(self, msg=None,):
- self.msg = msg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.msg = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('InvalidTopologyException')
- if self.msg is not None:
- oprot.writeFieldBegin('msg', TType.STRING, 1)
- oprot.writeString(self.msg.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.msg is None:
- raise TProtocol.TProtocolException(message='Required field msg is unset!')
- return
-
-
- def __str__(self):
- return repr(self)
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class TopologySummary:
- """
- Attributes:
- - id
- - name
- - status
- - uptime_secs
- - num_tasks
- - num_workers
- - error_info
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'id', None, None, ), # 1
- (2, TType.STRING, 'name', None, None, ), # 2
- (3, TType.STRING, 'status', None, None, ), # 3
- (4, TType.I32, 'uptime_secs', None, None, ), # 4
- (5, TType.I32, 'num_tasks', None, None, ), # 5
- (6, TType.I32, 'num_workers', None, None, ), # 6
- (7, TType.STRING, 'error_info', None, None, ), # 7
- )
-
- def __hash__(self):
- return 0 + hash(self.id) + hash(self.name) + hash(self.status) + hash(self.uptime_secs) + hash(self.num_tasks) + hash(self.num_workers) + hash(self.error_info)
-
- def __init__(self, id=None, name=None, status=None, uptime_secs=None, num_tasks=None, num_workers=None, error_info=None,):
- self.id = id
- self.name = name
- self.status = status
- self.uptime_secs = uptime_secs
- self.num_tasks = num_tasks
- self.num_workers = num_workers
- self.error_info = error_info
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.id = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.name = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.STRING:
- self.status = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.I32:
- self.uptime_secs = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 5:
- if ftype == TType.I32:
- self.num_tasks = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 6:
- if ftype == TType.I32:
- self.num_workers = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 7:
- if ftype == TType.STRING:
- self.error_info = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('TopologySummary')
- if self.id is not None:
- oprot.writeFieldBegin('id', TType.STRING, 1)
- oprot.writeString(self.id.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.name is not None:
- oprot.writeFieldBegin('name', TType.STRING, 2)
- oprot.writeString(self.name.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.status is not None:
- oprot.writeFieldBegin('status', TType.STRING, 3)
- oprot.writeString(self.status.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.uptime_secs is not None:
- oprot.writeFieldBegin('uptime_secs', TType.I32, 4)
- oprot.writeI32(self.uptime_secs)
- oprot.writeFieldEnd()
- if self.num_tasks is not None:
- oprot.writeFieldBegin('num_tasks', TType.I32, 5)
- oprot.writeI32(self.num_tasks)
- oprot.writeFieldEnd()
- if self.num_workers is not None:
- oprot.writeFieldBegin('num_workers', TType.I32, 6)
- oprot.writeI32(self.num_workers)
- oprot.writeFieldEnd()
- if self.error_info is not None:
- oprot.writeFieldBegin('error_info', TType.STRING, 7)
- oprot.writeString(self.error_info.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.id is None:
- raise TProtocol.TProtocolException(message='Required field id is unset!')
- if self.name is None:
- raise TProtocol.TProtocolException(message='Required field name is unset!')
- if self.status is None:
- raise TProtocol.TProtocolException(message='Required field status is unset!')
- if self.uptime_secs is None:
- raise TProtocol.TProtocolException(message='Required field uptime_secs is unset!')
- if self.num_tasks is None:
- raise TProtocol.TProtocolException(message='Required field num_tasks is unset!')
- if self.num_workers is None:
- raise TProtocol.TProtocolException(message='Required field num_workers is unset!')
- if self.error_info is None:
- raise TProtocol.TProtocolException(message='Required field error_info is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class SupervisorSummary:
- """
- Attributes:
- - host
- - supervisor_id
- - uptime_secs
- - num_workers
- - num_used_workers
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'host', None, None, ), # 1
- (2, TType.STRING, 'supervisor_id', None, None, ), # 2
- (3, TType.I32, 'uptime_secs', None, None, ), # 3
- (4, TType.I32, 'num_workers', None, None, ), # 4
- (5, TType.I32, 'num_used_workers', None, None, ), # 5
- )
-
- def __hash__(self):
- return 0 + hash(self.host) + hash(self.supervisor_id) + hash(self.uptime_secs) + hash(self.num_workers) + hash(self.num_used_workers)
-
- def __init__(self, host=None, supervisor_id=None, uptime_secs=None, num_workers=None, num_used_workers=None,):
- self.host = host
- self.supervisor_id = supervisor_id
- self.uptime_secs = uptime_secs
- self.num_workers = num_workers
- self.num_used_workers = num_used_workers
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.host = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRING:
- self.supervisor_id = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.I32:
- self.uptime_secs = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.I32:
- self.num_workers = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 5:
- if ftype == TType.I32:
- self.num_used_workers = iprot.readI32();
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('SupervisorSummary')
- if self.host is not None:
- oprot.writeFieldBegin('host', TType.STRING, 1)
- oprot.writeString(self.host.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.supervisor_id is not None:
- oprot.writeFieldBegin('supervisor_id', TType.STRING, 2)
- oprot.writeString(self.supervisor_id.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.uptime_secs is not None:
- oprot.writeFieldBegin('uptime_secs', TType.I32, 3)
- oprot.writeI32(self.uptime_secs)
- oprot.writeFieldEnd()
- if self.num_workers is not None:
- oprot.writeFieldBegin('num_workers', TType.I32, 4)
- oprot.writeI32(self.num_workers)
- oprot.writeFieldEnd()
- if self.num_used_workers is not None:
- oprot.writeFieldBegin('num_used_workers', TType.I32, 5)
- oprot.writeI32(self.num_used_workers)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.host is None:
- raise TProtocol.TProtocolException(message='Required field host is unset!')
- if self.supervisor_id is None:
- raise TProtocol.TProtocolException(message='Required field supervisor_id is unset!')
- if self.uptime_secs is None:
- raise TProtocol.TProtocolException(message='Required field uptime_secs is unset!')
- if self.num_workers is None:
- raise TProtocol.TProtocolException(message='Required field num_workers is unset!')
- if self.num_used_workers is None:
- raise TProtocol.TProtocolException(message='Required field num_used_workers is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class ClusterSummary:
- """
- Attributes:
- - supervisors
- - nimbus_uptime_secs
- - topologies
- - version
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.LIST, 'supervisors', (TType.STRUCT,(SupervisorSummary, SupervisorSummary.thrift_spec)), None, ), # 1
- (2, TType.I32, 'nimbus_uptime_secs', None, None, ), # 2
- (3, TType.LIST, 'topologies', (TType.STRUCT,(TopologySummary, TopologySummary.thrift_spec)), None, ), # 3
- (4, TType.STRING, 'version', None, None, ), # 4
- )
-
- def __hash__(self):
- return 0 + hash(self.supervisors) + hash(self.nimbus_uptime_secs) + hash(self.topologies) + hash(self.version)
-
- def __init__(self, supervisors=None, nimbus_uptime_secs=None, topologies=None, version=None,):
- self.supervisors = supervisors
- self.nimbus_uptime_secs = nimbus_uptime_secs
- self.topologies = topologies
- self.version = version
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.LIST:
- self.supervisors = []
- (_etype69, _size66) = iprot.readListBegin()
- for _i70 in xrange(_size66):
- _elem71 = SupervisorSummary()
- _elem71.read(iprot)
- self.supervisors.append(_elem71)
- iprot.readListEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.I32:
- self.nimbus_uptime_secs = iprot.readI32();
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.LIST:
- self.topologies = []
- (_etype75, _size72) = iprot.readListBegin()
- for _i76 in xrange(_size72):
- _elem77 = TopologySummary()
- _elem77.read(iprot)
- self.topologies.append(_elem77)
- iprot.readListEnd()
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.STRING:
- self.version = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('ClusterSummary')
- if self.supervisors is not None:
- oprot.writeFieldBegin('supervisors', TType.LIST, 1)
- oprot.writeListBegin(TType.STRUCT, len(self.supervisors))
- for iter78 in self.supervisors:
- iter78.write(oprot)
- oprot.writeListEnd()
- oprot.writeFieldEnd()
- if self.nimbus_uptime_secs is not None:
- oprot.writeFieldBegin('nimbus_uptime_secs', TType.I32, 2)
- oprot.writeI32(self.nimbus_uptime_secs)
- oprot.writeFieldEnd()
- if self.topologies is not None:
- oprot.writeFieldBegin('topologies', TType.LIST, 3)
- oprot.writeListBegin(TType.STRUCT, len(self.topologies))
- for iter79 in self.topologies:
- iter79.write(oprot)
- oprot.writeListEnd()
- oprot.writeFieldEnd()
- if self.version is not None:
- oprot.writeFieldBegin('version', TType.STRING, 4)
- oprot.writeString(self.version.encode('utf-8'))
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.supervisors is None:
- raise TProtocol.TProtocolException(message='Required field supervisors is unset!')
- if self.nimbus_uptime_secs is None:
- raise TProtocol.TProtocolException(message='Required field nimbus_uptime_secs is unset!')
- if self.topologies is None:
- raise TProtocol.TProtocolException(message='Required field topologies is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class ErrorInfo:
- """
- Attributes:
- - error
- - error_time_secs
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRING, 'error', None, None, ), # 1
- (2, TType.I32, 'error_time_secs', None, None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.error) + hash(self.error_time_secs)
-
- def __init__(self, error=None, error_time_secs=None,):
- self.error = error
- self.error_time_secs = error_time_secs
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRING:
- self.error = iprot.readString().decode('utf-8')
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.I32:
- self.error_time_secs = iprot.readI32();
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('ErrorInfo')
- if self.error is not None:
- oprot.writeFieldBegin('error', TType.STRING, 1)
- oprot.writeString(self.error.encode('utf-8'))
- oprot.writeFieldEnd()
- if self.error_time_secs is not None:
- oprot.writeFieldBegin('error_time_secs', TType.I32, 2)
- oprot.writeI32(self.error_time_secs)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.error is None:
- raise TProtocol.TProtocolException(message='Required field error is unset!')
- if self.error_time_secs is None:
- raise TProtocol.TProtocolException(message='Required field error_time_secs is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class BoltStats:
- """
- Attributes:
- - acked
- - failed
- - process_ms_avg
- - executed
- - execute_ms_avg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.MAP, 'acked', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 1
- (2, TType.MAP, 'failed', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 2
- (3, TType.MAP, 'process_ms_avg', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.DOUBLE,None)), None, ), # 3
- (4, TType.MAP, 'executed', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 4
- (5, TType.MAP, 'execute_ms_avg', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.DOUBLE,None)), None, ), # 5
- )
-
- def __hash__(self):
- return 0 + hash(self.acked) + hash(self.failed) + hash(self.process_ms_avg) + hash(self.executed) + hash(self.execute_ms_avg)
-
- def __init__(self, acked=None, failed=None, process_ms_avg=None, executed=None, execute_ms_avg=None,):
- self.acked = acked
- self.failed = failed
- self.process_ms_avg = process_ms_avg
- self.executed = executed
- self.execute_ms_avg = execute_ms_avg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.MAP:
- self.acked = {}
- (_ktype81, _vtype82, _size80 ) = iprot.readMapBegin()
- for _i84 in xrange(_size80):
- _key85 = iprot.readString().decode('utf-8')
- _val86 = {}
- (_ktype88, _vtype89, _size87 ) = iprot.readMapBegin()
- for _i91 in xrange(_size87):
- _key92 = GlobalStreamId()
- _key92.read(iprot)
- _val93 = iprot.readI64();
- _val86[_key92] = _val93
- iprot.readMapEnd()
- self.acked[_key85] = _val86
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.MAP:
- self.failed = {}
- (_ktype95, _vtype96, _size94 ) = iprot.readMapBegin()
- for _i98 in xrange(_size94):
- _key99 = iprot.readString().decode('utf-8')
- _val100 = {}
- (_ktype102, _vtype103, _size101 ) = iprot.readMapBegin()
- for _i105 in xrange(_size101):
- _key106 = GlobalStreamId()
- _key106.read(iprot)
- _val107 = iprot.readI64();
- _val100[_key106] = _val107
- iprot.readMapEnd()
- self.failed[_key99] = _val100
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.MAP:
- self.process_ms_avg = {}
- (_ktype109, _vtype110, _size108 ) = iprot.readMapBegin()
- for _i112 in xrange(_size108):
- _key113 = iprot.readString().decode('utf-8')
- _val114 = {}
- (_ktype116, _vtype117, _size115 ) = iprot.readMapBegin()
- for _i119 in xrange(_size115):
- _key120 = GlobalStreamId()
- _key120.read(iprot)
- _val121 = iprot.readDouble();
- _val114[_key120] = _val121
- iprot.readMapEnd()
- self.process_ms_avg[_key113] = _val114
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 4:
- if ftype == TType.MAP:
- self.executed = {}
- (_ktype123, _vtype124, _size122 ) = iprot.readMapBegin()
- for _i126 in xrange(_size122):
- _key127 = iprot.readString().decode('utf-8')
- _val128 = {}
- (_ktype130, _vtype131, _size129 ) = iprot.readMapBegin()
- for _i133 in xrange(_size129):
- _key134 = GlobalStreamId()
- _key134.read(iprot)
- _val135 = iprot.readI64();
- _val128[_key134] = _val135
- iprot.readMapEnd()
- self.executed[_key127] = _val128
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 5:
- if ftype == TType.MAP:
- self.execute_ms_avg = {}
- (_ktype137, _vtype138, _size136 ) = iprot.readMapBegin()
- for _i140 in xrange(_size136):
- _key141 = iprot.readString().decode('utf-8')
- _val142 = {}
- (_ktype144, _vtype145, _size143 ) = iprot.readMapBegin()
- for _i147 in xrange(_size143):
- _key148 = GlobalStreamId()
- _key148.read(iprot)
- _val149 = iprot.readDouble();
- _val142[_key148] = _val149
- iprot.readMapEnd()
- self.execute_ms_avg[_key141] = _val142
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('BoltStats')
- if self.acked is not None:
- oprot.writeFieldBegin('acked', TType.MAP, 1)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.acked))
- for kiter150,viter151 in self.acked.items():
- oprot.writeString(kiter150.encode('utf-8'))
- oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter151))
- for kiter152,viter153 in viter151.items():
- kiter152.write(oprot)
- oprot.writeI64(viter153)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.failed is not None:
- oprot.writeFieldBegin('failed', TType.MAP, 2)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.failed))
- for kiter154,viter155 in self.failed.items():
- oprot.writeString(kiter154.encode('utf-8'))
- oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter155))
- for kiter156,viter157 in viter155.items():
- kiter156.write(oprot)
- oprot.writeI64(viter157)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.process_ms_avg is not None:
- oprot.writeFieldBegin('process_ms_avg', TType.MAP, 3)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.process_ms_avg))
- for kiter158,viter159 in self.process_ms_avg.items():
- oprot.writeString(kiter158.encode('utf-8'))
- oprot.writeMapBegin(TType.STRUCT, TType.DOUBLE, len(viter159))
- for kiter160,viter161 in viter159.items():
- kiter160.write(oprot)
- oprot.writeDouble(viter161)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.executed is not None:
- oprot.writeFieldBegin('executed', TType.MAP, 4)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.executed))
- for kiter162,viter163 in self.executed.items():
- oprot.writeString(kiter162.encode('utf-8'))
- oprot.writeMapBegin(TType.STRUCT, TType.I64, len(viter163))
- for kiter164,viter165 in viter163.items():
- kiter164.write(oprot)
- oprot.writeI64(viter165)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.execute_ms_avg is not None:
- oprot.writeFieldBegin('execute_ms_avg', TType.MAP, 5)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.execute_ms_avg))
- for kiter166,viter167 in self.execute_ms_avg.items():
- oprot.writeString(kiter166.encode('utf-8'))
- oprot.writeMapBegin(TType.STRUCT, TType.DOUBLE, len(viter167))
- for kiter168,viter169 in viter167.items():
- kiter168.write(oprot)
- oprot.writeDouble(viter169)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.acked is None:
- raise TProtocol.TProtocolException(message='Required field acked is unset!')
- if self.failed is None:
- raise TProtocol.TProtocolException(message='Required field failed is unset!')
- if self.process_ms_avg is None:
- raise TProtocol.TProtocolException(message='Required field process_ms_avg is unset!')
- if self.executed is None:
- raise TProtocol.TProtocolException(message='Required field executed is unset!')
- if self.execute_ms_avg is None:
- raise TProtocol.TProtocolException(message='Required field execute_ms_avg is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class SpoutStats:
- """
- Attributes:
- - acked
- - failed
- - complete_ms_avg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.MAP, 'acked', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 1
- (2, TType.MAP, 'failed', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 2
- (3, TType.MAP, 'complete_ms_avg', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.DOUBLE,None)), None, ), # 3
- )
-
- def __hash__(self):
- return 0 + hash(self.acked) + hash(self.failed) + hash(self.complete_ms_avg)
-
- def __init__(self, acked=None, failed=None, complete_ms_avg=None,):
- self.acked = acked
- self.failed = failed
- self.complete_ms_avg = complete_ms_avg
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.MAP:
- self.acked = {}
- (_ktype171, _vtype172, _size170 ) = iprot.readMapBegin()
- for _i174 in xrange(_size170):
- _key175 = iprot.readString().decode('utf-8')
- _val176 = {}
- (_ktype178, _vtype179, _size177 ) = iprot.readMapBegin()
- for _i181 in xrange(_size177):
- _key182 = iprot.readString().decode('utf-8')
- _val183 = iprot.readI64();
- _val176[_key182] = _val183
- iprot.readMapEnd()
- self.acked[_key175] = _val176
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.MAP:
- self.failed = {}
- (_ktype185, _vtype186, _size184 ) = iprot.readMapBegin()
- for _i188 in xrange(_size184):
- _key189 = iprot.readString().decode('utf-8')
- _val190 = {}
- (_ktype192, _vtype193, _size191 ) = iprot.readMapBegin()
- for _i195 in xrange(_size191):
- _key196 = iprot.readString().decode('utf-8')
- _val197 = iprot.readI64();
- _val190[_key196] = _val197
- iprot.readMapEnd()
- self.failed[_key189] = _val190
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- elif fid == 3:
- if ftype == TType.MAP:
- self.complete_ms_avg = {}
- (_ktype199, _vtype200, _size198 ) = iprot.readMapBegin()
- for _i202 in xrange(_size198):
- _key203 = iprot.readString().decode('utf-8')
- _val204 = {}
- (_ktype206, _vtype207, _size205 ) = iprot.readMapBegin()
- for _i209 in xrange(_size205):
- _key210 = iprot.readString().decode('utf-8')
- _val211 = iprot.readDouble();
- _val204[_key210] = _val211
- iprot.readMapEnd()
- self.complete_ms_avg[_key203] = _val204
- iprot.readMapEnd()
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('SpoutStats')
- if self.acked is not None:
- oprot.writeFieldBegin('acked', TType.MAP, 1)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.acked))
- for kiter212,viter213 in self.acked.items():
- oprot.writeString(kiter212.encode('utf-8'))
- oprot.writeMapBegin(TType.STRING, TType.I64, len(viter213))
- for kiter214,viter215 in viter213.items():
- oprot.writeString(kiter214.encode('utf-8'))
- oprot.writeI64(viter215)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.failed is not None:
- oprot.writeFieldBegin('failed', TType.MAP, 2)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.failed))
- for kiter216,viter217 in self.failed.items():
- oprot.writeString(kiter216.encode('utf-8'))
- oprot.writeMapBegin(TType.STRING, TType.I64, len(viter217))
- for kiter218,viter219 in viter217.items():
- oprot.writeString(kiter218.encode('utf-8'))
- oprot.writeI64(viter219)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- if self.complete_ms_avg is not None:
- oprot.writeFieldBegin('complete_ms_avg', TType.MAP, 3)
- oprot.writeMapBegin(TType.STRING, TType.MAP, len(self.complete_ms_avg))
- for kiter220,viter221 in self.complete_ms_avg.items():
- oprot.writeString(kiter220.encode('utf-8'))
- oprot.writeMapBegin(TType.STRING, TType.DOUBLE, len(viter221))
- for kiter222,viter223 in viter221.items():
- oprot.writeString(kiter222.encode('utf-8'))
- oprot.writeDouble(viter223)
- oprot.writeMapEnd()
- oprot.writeMapEnd()
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- if self.acked is None:
- raise TProtocol.TProtocolException(message='Required field acked is unset!')
- if self.failed is None:
- raise TProtocol.TProtocolException(message='Required field failed is unset!')
- if self.complete_ms_avg is None:
- raise TProtocol.TProtocolException(message='Required field complete_ms_avg is unset!')
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class ExecutorSpecificStats:
- """
- Attributes:
- - bolt
- - spout
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.STRUCT, 'bolt', (BoltStats, BoltStats.thrift_spec), None, ), # 1
- (2, TType.STRUCT, 'spout', (SpoutStats, SpoutStats.thrift_spec), None, ), # 2
- )
-
- def __hash__(self):
- return 0 + hash(self.bolt) + hash(self.spout)
-
- def __init__(self, bolt=None, spout=None,):
- self.bolt = bolt
- self.spout = spout
-
- def read(self, iprot):
- if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
- fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
- return
- iprot.readStructBegin()
- while True:
- (fname, ftype, fid) = iprot.readFieldBegin()
- if ftype == TType.STOP:
- break
- if fid == 1:
- if ftype == TType.STRUCT:
- self.bolt = BoltStats()
- self.bolt.read(iprot)
- else:
- iprot.skip(ftype)
- elif fid == 2:
- if ftype == TType.STRUCT:
- self.spout = SpoutStats()
- self.spout.read(iprot)
- else:
- iprot.skip(ftype)
- else:
- iprot.skip(ftype)
- iprot.readFieldEnd()
- iprot.readStructEnd()
-
- def write(self, oprot):
- if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
- oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
- return
- oprot.writeStructBegin('ExecutorSpecificStats')
- if self.bolt is not None:
- oprot.writeFieldBegin('bolt', TType.STRUCT, 1)
- self.bolt.write(oprot)
- oprot.writeFieldEnd()
- if self.spout is not None:
- oprot.writeFieldBegin('spout', TType.STRUCT, 2)
- self.spout.write(oprot)
- oprot.writeFieldEnd()
- oprot.writeFieldStop()
- oprot.writeStructEnd()
-
- def validate(self):
- return
-
-
- def __repr__(self):
- L = ['%s=%r' % (key, value)
- for key, value in self.__dict__.iteritems()]
- return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
-
- def __eq__(self, other):
- return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not (self == other)
-
-class TaskStats:
- """
- Attributes:
- - emitted
- - send_tps
- - recv_tps
- - acked
- - failed
- - process_ms_avg
- """
-
- thrift_spec = (
- None, # 0
- (1, TType.MAP, 'emitted', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.I64,None)), None, ), # 1
- (2, TType.MAP, 'send_tps', (TType.STRING,None,TType.MAP,(TType.STRING,None,TType.DOUBLE,None)), None, ), # 2
- (3, TType.MAP, 'recv_tps', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.DOUBLE,None)), None, ), # 3
- (4, TType.MAP, 'acked', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 4
- (5, TType.MAP, 'failed', (TType.STRING,None,TType.MAP,(TType.STRUCT,(GlobalStreamId, GlobalStreamId.thrift_spec),TType.I64,None)), None, ), # 5
- (6, TType.MAP, 'process_ms_avg', (TType.STRING,None,TType.MA
<TRUNCATED>
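
Editor's note: the structs in the (truncated) ttypes.py diff above all follow the same generated pattern: a thrift_spec tuple per class, read()/write() methods that walk that spec field by field, and validate() guarding required fields. Purely as an illustrative sketch (not part of the commit), a round trip through the stock Thrift Python library could look like the following, assuming TopologySummary has been imported from the generated ttypes module:

  from thrift.transport import TTransport
  from thrift.protocol import TBinaryProtocol
  # Assumed import; the exact generated module path is not shown in this diff:
  # from backtype.storm.generated.ttypes import TopologySummary

  # Build a summary with every required field populated (values are made up).
  summary = TopologySummary(id='topo-1', name='word-count', status='ACTIVE',
                            uptime_secs=120, num_tasks=8, num_workers=2,
                            error_info='')
  summary.validate()  # raises TProtocolException if any required field is None

  # Serialize into an in-memory buffer with the binary protocol.
  out_buf = TTransport.TMemoryBuffer()
  summary.write(TBinaryProtocol.TBinaryProtocol(out_buf))

  # Read the same bytes back into a fresh instance.
  in_buf = TTransport.TMemoryBuffer(out_buf.getvalue())
  decoded = TopologySummary()
  decoded.read(TBinaryProtocol.TBinaryProtocol(in_buf))
  assert decoded == summary  # the generated __eq__ compares the instances' __dict__

The same pattern applies to the other generated structs in the diff; only the field names and nesting of the thrift_spec tuples differ.
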
[60/60] [abbrv] storm git commit: remove IDE-specific project files
Posted by pt...@apache.org.
remove IDE-specific project files
Project: http://git-wip-us.apache.org/repos/asf/storm/repo
Commit: http://git-wip-us.apache.org/repos/asf/storm/commit/dbc1236f
Tree: http://git-wip-us.apache.org/repos/asf/storm/tree/dbc1236f
Diff: http://git-wip-us.apache.org/repos/asf/storm/diff/dbc1236f
Branch: refs/heads/jstorm-import
Commit: dbc1236fb84fdbe90999dc32ffc0f92b41f1bf16
Parents: 5744ac3
Author: P. Taylor Goetz <pt...@gmail.com>
Authored: Thu Nov 5 15:36:33 2015 -0500
Committer: P. Taylor Goetz <pt...@gmail.com>
Committed: Thu Nov 5 15:36:33 2015 -0500
----------------------------------------------------------------------
example/sequence-split-merge/.classpath | 31 ---------------
example/sequence-split-merge/.project | 23 -----------
.../.settings/org.eclipse.core.resources.prefs | 5 ---
.../.settings/org.eclipse.jdt.core.prefs | 5 ---
.../.settings/org.eclipse.m2e.core.prefs | 4 --
jstorm-core/.classpath | 37 -----------------
jstorm-core/.project | 36 -----------------
.../.settings/org.eclipse.core.resources.prefs | 6 ---
.../.settings/org.eclipse.jdt.core.prefs | 8 ----
.../.settings/org.eclipse.m2e.core.prefs | 4 --
.../.settings/org.eclipse.wst.common.component | 6 ---
...rg.eclipse.wst.common.project.facet.core.xml | 5 ---
.../.settings/org.eclipse.wst.validation.prefs | 2 -
jstorm-ui/.classpath | 32 ---------------
jstorm-ui/.project | 42 --------------------
jstorm-ui/.settings/.jsdtscope | 13 ------
.../.settings/org.eclipse.core.resources.prefs | 4 --
jstorm-ui/.settings/org.eclipse.jdt.core.prefs | 8 ----
jstorm-ui/.settings/org.eclipse.m2e.core.prefs | 4 --
.../.settings/org.eclipse.wst.common.component | 13 ------
...ipse.wst.common.project.facet.core.prefs.xml | 12 ------
...rg.eclipse.wst.common.project.facet.core.xml | 9 -----
.../org.eclipse.wst.jsdt.ui.superType.container | 1 -
.../org.eclipse.wst.jsdt.ui.superType.name | 1 -
.../.settings/org.eclipse.wst.validation.prefs | 2 -
25 files changed, 313 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/example/sequence-split-merge/.classpath
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.classpath b/example/sequence-split-merge/.classpath
deleted file mode 100755
index f8ce0d3..0000000
--- a/example/sequence-split-merge/.classpath
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" output="target/classes" path="src/main/java">
- <attributes>
- <attribute name="optional" value="true"/>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="src" output="target/test-classes" path="src/test/java">
- <attributes>
- <attribute name="optional" value="true"/>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="output" path="target/classes"/>
-</classpath>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/example/sequence-split-merge/.project
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.project b/example/sequence-split-merge/.project
deleted file mode 100755
index 4269e67..0000000
--- a/example/sequence-split-merge/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>sequence-split-merge</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.m2e.core.maven2Builder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.jdt.core.javanature</nature>
- <nature>org.eclipse.m2e.core.maven2Nature</nature>
- </natures>
-</projectDescription>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs b/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs
deleted file mode 100755
index 8bc0e1c..0000000
--- a/example/sequence-split-merge/.settings/org.eclipse.core.resources.prefs
+++ /dev/null
@@ -1,5 +0,0 @@
-eclipse.preferences.version=1
-encoding//src/main/java=UTF-8
-encoding//src/test/java=UTF-8
-encoding//src/test/resources=UTF-8
-encoding/<project>=UTF-8
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs b/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100755
index 14f521d..0000000
--- a/example/sequence-split-merge/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,5 +0,0 @@
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs
----------------------------------------------------------------------
diff --git a/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs b/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs
deleted file mode 100755
index 14b697b..0000000
--- a/example/sequence-split-merge/.settings/org.eclipse.m2e.core.prefs
+++ /dev/null
@@ -1,4 +0,0 @@
-activeProfiles=
-eclipse.preferences.version=1
-resolveWorkspaceProjects=true
-version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.classpath
----------------------------------------------------------------------
diff --git a/jstorm-core/.classpath b/jstorm-core/.classpath
deleted file mode 100755
index f0a60b6..0000000
--- a/jstorm-core/.classpath
+++ /dev/null
@@ -1,37 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" output="target/classes" path="src/main/java">
- <attributes>
- <attribute name="optional" value="true"/>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="src" output="target/test-classes" path="src/test/java">
- <attributes>
- <attribute name="optional" value="true"/>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry excluding="**" kind="src" output="target/test-classes" path="src/test/resources">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- <attribute name="org.eclipse.jst.component.nondependency" value=""/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.7">
- <attributes>
- <attribute name="owner.project.facets" value="java"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="output" path="target/classes"/>
-</classpath>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.project
----------------------------------------------------------------------
diff --git a/jstorm-core/.project b/jstorm-core/.project
deleted file mode 100755
index 617507f..0000000
--- a/jstorm-core/.project
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>jstorm-core</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.wst.common.project.facet.core.builder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.wst.validation.validationbuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.m2e.core.maven2Builder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
- <nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- <nature>org.eclipse.m2e.core.maven2Nature</nature>
- <nature>org.eclipse.wst.common.project.facet.core.nature</nature>
- </natures>
-</projectDescription>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.core.resources.prefs b/jstorm-core/.settings/org.eclipse.core.resources.prefs
deleted file mode 100755
index 04cfa2c..0000000
--- a/jstorm-core/.settings/org.eclipse.core.resources.prefs
+++ /dev/null
@@ -1,6 +0,0 @@
-eclipse.preferences.version=1
-encoding//src/main/java=UTF-8
-encoding//src/main/resources=UTF-8
-encoding//src/test/java=UTF-8
-encoding//src/test/resources=UTF-8
-encoding/<project>=UTF-8
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.jdt.core.prefs b/jstorm-core/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100755
index c788ee3..0000000
--- a/jstorm-core/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,8 +0,0 @@
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
-org.eclipse.jdt.core.compiler.compliance=1.7
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.7
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.settings/org.eclipse.m2e.core.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.m2e.core.prefs b/jstorm-core/.settings/org.eclipse.m2e.core.prefs
deleted file mode 100755
index 14b697b..0000000
--- a/jstorm-core/.settings/org.eclipse.m2e.core.prefs
+++ /dev/null
@@ -1,4 +0,0 @@
-activeProfiles=
-eclipse.preferences.version=1
-resolveWorkspaceProjects=true
-version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.settings/org.eclipse.wst.common.component
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.wst.common.component b/jstorm-core/.settings/org.eclipse.wst.common.component
deleted file mode 100755
index aaa3793..0000000
--- a/jstorm-core/.settings/org.eclipse.wst.common.component
+++ /dev/null
@@ -1,6 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
- <wb-module deploy-name="jstorm-core">
- <wb-resource deploy-path="/" source-path="/src/main/java"/>
- <wb-resource deploy-path="/" source-path="/src/main/resources"/>
- </wb-module>
-</project-modules>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml b/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml
deleted file mode 100755
index 4f92af5..0000000
--- a/jstorm-core/.settings/org.eclipse.wst.common.project.facet.core.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<faceted-project>
- <installed facet="jst.utility" version="1.0"/>
- <installed facet="java" version="1.7"/>
-</faceted-project>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-core/.settings/org.eclipse.wst.validation.prefs
----------------------------------------------------------------------
diff --git a/jstorm-core/.settings/org.eclipse.wst.validation.prefs b/jstorm-core/.settings/org.eclipse.wst.validation.prefs
deleted file mode 100644
index 04cad8c..0000000
--- a/jstorm-core/.settings/org.eclipse.wst.validation.prefs
+++ /dev/null
@@ -1,2 +0,0 @@
-disabled=06target
-eclipse.preferences.version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.classpath
----------------------------------------------------------------------
diff --git a/jstorm-ui/.classpath b/jstorm-ui/.classpath
deleted file mode 100755
index 0f4a4c2..0000000
--- a/jstorm-ui/.classpath
+++ /dev/null
@@ -1,32 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" output="target/classes" path="src/main/java">
- <attributes>
- <attribute name="optional" value="true"/>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry excluding="**" kind="src" output="target/classes" path="src/main/resources">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="src" output="target/test-classes" path="src/test/java">
- <attributes>
- <attribute name="optional" value="true"/>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
- <attributes>
- <attribute name="maven.pomderived" value="true"/>
- <attribute name="org.eclipse.jst.component.dependency" value="/WEB-INF/lib"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="output" path="target/classes"/>
-</classpath>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.project
----------------------------------------------------------------------
diff --git a/jstorm-ui/.project b/jstorm-ui/.project
deleted file mode 100755
index af405e7..0000000
--- a/jstorm-ui/.project
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<projectDescription>
- <name>jstorm-ui</name>
- <comment></comment>
- <projects>
- </projects>
- <buildSpec>
- <buildCommand>
- <name>org.eclipse.wst.jsdt.core.javascriptValidator</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.jdt.core.javabuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.wst.common.project.facet.core.builder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.wst.validation.validationbuilder</name>
- <arguments>
- </arguments>
- </buildCommand>
- <buildCommand>
- <name>org.eclipse.m2e.core.maven2Builder</name>
- <arguments>
- </arguments>
- </buildCommand>
- </buildSpec>
- <natures>
- <nature>org.eclipse.jem.workbench.JavaEMFNature</nature>
- <nature>org.eclipse.wst.common.modulecore.ModuleCoreNature</nature>
- <nature>org.eclipse.jdt.core.javanature</nature>
- <nature>org.eclipse.m2e.core.maven2Nature</nature>
- <nature>org.eclipse.wst.common.project.facet.core.nature</nature>
- <nature>org.eclipse.wst.jsdt.core.jsNature</nature>
- </natures>
-</projectDescription>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/.jsdtscope
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/.jsdtscope b/jstorm-ui/.settings/.jsdtscope
deleted file mode 100755
index 585c967..0000000
--- a/jstorm-ui/.settings/.jsdtscope
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<classpath>
- <classpathentry kind="src" path="src/main/webapp"/>
- <classpathentry kind="src" path="target/m2e-wtp/web-resources"/>
- <classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.JRE_CONTAINER"/>
- <classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.WebProject">
- <attributes>
- <attribute name="hide" value="true"/>
- </attributes>
- </classpathentry>
- <classpathentry kind="con" path="org.eclipse.wst.jsdt.launching.baseBrowserLibrary"/>
- <classpathentry kind="output" path=""/>
-</classpath>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.core.resources.prefs
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.core.resources.prefs b/jstorm-ui/.settings/org.eclipse.core.resources.prefs
deleted file mode 100755
index cf6931b..0000000
--- a/jstorm-ui/.settings/org.eclipse.core.resources.prefs
+++ /dev/null
@@ -1,4 +0,0 @@
-eclipse.preferences.version=1
-encoding//src/main/java=UTF-8
-encoding//src/main/resources=UTF-8
-encoding/<project>=UTF-8
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.jdt.core.prefs
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.jdt.core.prefs b/jstorm-ui/.settings/org.eclipse.jdt.core.prefs
deleted file mode 100755
index 62a317c..0000000
--- a/jstorm-ui/.settings/org.eclipse.jdt.core.prefs
+++ /dev/null
@@ -1,8 +0,0 @@
-eclipse.preferences.version=1
-org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled
-org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
-org.eclipse.jdt.core.compiler.compliance=1.6
-org.eclipse.jdt.core.compiler.problem.assertIdentifier=error
-org.eclipse.jdt.core.compiler.problem.enumIdentifier=error
-org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
-org.eclipse.jdt.core.compiler.source=1.6
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.m2e.core.prefs
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.m2e.core.prefs b/jstorm-ui/.settings/org.eclipse.m2e.core.prefs
deleted file mode 100755
index 14b697b..0000000
--- a/jstorm-ui/.settings/org.eclipse.m2e.core.prefs
+++ /dev/null
@@ -1,4 +0,0 @@
-activeProfiles=
-eclipse.preferences.version=1
-resolveWorkspaceProjects=true
-version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.wst.common.component
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.wst.common.component b/jstorm-ui/.settings/org.eclipse.wst.common.component
deleted file mode 100755
index c6036b5..0000000
--- a/jstorm-ui/.settings/org.eclipse.wst.common.component
+++ /dev/null
@@ -1,13 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?><project-modules id="moduleCoreId" project-version="1.5.0">
- <wb-module deploy-name="jstorm-ui">
- <wb-resource deploy-path="/" source-path="/target/m2e-wtp/web-resources"/>
- <wb-resource deploy-path="/" source-path="/src/main/webapp" tag="defaultRootSource"/>
- <wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/java"/>
- <wb-resource deploy-path="/WEB-INF/classes" source-path="/src/main/resources"/>
- <dependent-module archiveName="jstorm-core-2.0.4-SNAPSHOT.jar" deploy-path="/WEB-INF/lib" handle="module:/resource/jstorm-core/jstorm-core">
- <dependency-type>uses</dependency-type>
- </dependent-module>
- <property name="context-root" value="jstorm-ui"/>
- <property name="java-output-path" value="/jstorm-ui/target/classes"/>
- </wb-module>
-</project-modules>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml b/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml
deleted file mode 100755
index 1ac9c29..0000000
--- a/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.prefs.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<root>
- <facet id="jst.jaxrs">
- <node name="libprov">
- <attribute name="provider-id" value="jaxrs-no-op-library-provider"/>
- </node>
- </facet>
- <facet id="jst.jsf">
- <node name="libprov">
- <attribute name="provider-id" value="jsf-no-op-library-provider"/>
- </node>
- </facet>
-</root>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.xml
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.xml b/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.xml
deleted file mode 100755
index a3dd227..0000000
--- a/jstorm-ui/.settings/org.eclipse.wst.common.project.facet.core.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<faceted-project>
- <fixed facet="wst.jsdt.web"/>
- <installed facet="java" version="1.6"/>
- <installed facet="jst.web" version="3.0"/>
- <installed facet="jst.jaxrs" version="1.1"/>
- <installed facet="jst.jsf" version="2.0"/>
- <installed facet="wst.jsdt.web" version="1.0"/>
-</faceted-project>
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.container
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.container b/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.container
deleted file mode 100755
index 3bd5d0a..0000000
--- a/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.container
+++ /dev/null
@@ -1 +0,0 @@
-org.eclipse.wst.jsdt.launching.baseBrowserLibrary
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.name
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.name b/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.name
deleted file mode 100755
index 05bd71b..0000000
--- a/jstorm-ui/.settings/org.eclipse.wst.jsdt.ui.superType.name
+++ /dev/null
@@ -1 +0,0 @@
-Window
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/dbc1236f/jstorm-ui/.settings/org.eclipse.wst.validation.prefs
----------------------------------------------------------------------
diff --git a/jstorm-ui/.settings/org.eclipse.wst.validation.prefs b/jstorm-ui/.settings/org.eclipse.wst.validation.prefs
deleted file mode 100755
index 6f1cba68..0000000
--- a/jstorm-ui/.settings/org.eclipse.wst.validation.prefs
+++ /dev/null
@@ -1,2 +0,0 @@
-disabled=06target
-eclipse.preferences.version=1
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/Constants.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/Constants.java b/jstorm-core/src/main/java/backtype/storm/Constants.java
new file mode 100755
index 0000000..2797b69
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/Constants.java
@@ -0,0 +1,37 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import backtype.storm.coordination.CoordinatedBolt;
+import clojure.lang.RT;
+
+
+public class Constants {
+ public static final String COORDINATED_STREAM_ID = CoordinatedBolt.class.getName() + "/coord-stream";
+
+ public static final long SYSTEM_TASK_ID = -1;
+ public static final Object SYSTEM_EXECUTOR_ID = RT.readString("[-1 -1]");
+ public static final String SYSTEM_COMPONENT_ID = "__system";
+ public static final String SYSTEM_TICK_STREAM_ID = "__tick";
+ public static final String METRICS_COMPONENT_ID_PREFIX = "__metrics";
+ public static final String METRICS_STREAM_ID = "__metrics";
+ public static final String METRICS_TICK_STREAM_ID = "__metrics_tick";
+ public static final String CREDENTIALS_CHANGED_STREAM_ID = "__credentials";
+
+ public static final String JSTORM_CONF_DIR = "JSTORM_CONF_DIR";
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/GenericOptionsParser.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/GenericOptionsParser.java b/jstorm-core/src/main/java/backtype/storm/GenericOptionsParser.java
new file mode 100755
index 0000000..9319ce1
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/GenericOptionsParser.java
@@ -0,0 +1,283 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.LinkedHashMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.yaml.snakeyaml.Yaml;
+
+/**
+ * <code>GenericOptionsParser</code> is a utility to parse command line arguments generic to Storm.
+ *
+ * <code>GenericOptionsParser</code> recognizes several standard command line arguments, enabling applications to easily specify additional jar files,
+ * configuration resources, data files etc.
+ *
+ * <h4 id="GenericOptions">Generic Options</h4>
+ *
+ * <p>
+ * The supported generic options are:
+ * </p>
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * -conf <conf.xml> load configurations from
+ * <conf.xml>
+ * -conf <conf.yaml> load configurations from
+ * <conf.yaml>
+ * -D <key=value> set <key> in configuration to
+ * <value> (preserve value's type)
+ * -libjars <comma separated list of jars> specify comma separated jars to be
+ * used by the submitted topology
+ * </pre>
+ *
+ * </blockquote>
+ * </p>
+ *
+ * <b>Note:</b> The XML configuration file specified by <code>-conf</code> shall be readable by Hadoop's <a href=
+ * "http://hadoop.apache.org/docs/current/api/org/apache/hadoop/conf/Configuration.html" ><code>Configuration</code></a> class. Also note that all configuration
+ * values of an XML file will be treated as strings, and <b>not as specific types</b>.
+ *
+ * <p>
+ * The general command line syntax is:
+ * </p>
+ * <p>
+ * <tt><pre>
+ * storm jar app.jar [genericOptions] [commandOptions]
+ * </pre></tt>
+ * </p>
+ *
+ * <p>
+ * Generic command line arguments <strong>might</strong> modify the <code>Config</code> object given to the constructor.
+ * </p>
+ *
+ * <h4>Configuration priority</h4>
+ *
+ * The following list defines the priorities of different configuration sources, in ascending order. Thus, if a configuration appears in more than one of them,
+ * only the last one will take effect.
+ *
+ * <ul>
+ * <li> <code>defaults.yaml</code> in classpath.
+ * <li> <code>storm.yaml</code> in classpath.
+ * <li>Configurations from files specified with the <code>-conf</code> option, in the order of appearance.
+ * <li>Configurations defined with the <code>-D</code> option, in order of appearance.
+ * </ul>
+ *
+ * <p>
+ * The functionality is implemented using Commons CLI.
+ * </p>
+ *
+ * @see Tool
+ * @see ToolRunner
+ */
+
+public class GenericOptionsParser {
+ static final Logger LOG = LoggerFactory.getLogger(GenericOptionsParser.class);
+
+ static final Charset UTF8 = Charset.forName("UTF-8");
+
+ public static final String TOPOLOGY_LIB_PATH = "topology.lib.path";
+
+ public static final String TOPOLOGY_LIB_NAME = "topology.lib.name";
+
+ Config conf;
+
+ CommandLine commandLine;
+
+ // Order in this map is important for these purposes:
+ // - configuration priority
+ static final LinkedHashMap<String, OptionProcessor> optionProcessors = new LinkedHashMap<String, OptionProcessor>();
+
+ public GenericOptionsParser(Config conf, String[] args) throws ParseException {
+ this(conf, new Options(), args);
+ }
+
+ public GenericOptionsParser(Config conf, Options options, String[] args) throws ParseException {
+ this.conf = conf;
+ parseGeneralOptions(options, conf, args);
+ }
+
+ public String[] getRemainingArgs() {
+ return commandLine.getArgs();
+ }
+
+ public Config getConfiguration() {
+ return conf;
+ }
+
+ static Options buildGeneralOptions(Options opts) {
+ Options r = new Options();
+
+ for (Object o : opts.getOptions())
+ r.addOption((Option) o);
+
+ Option libjars = OptionBuilder.withArgName("paths").hasArg().withDescription("comma separated jars to be used by the submitted topology").create("libjars");
+ r.addOption(libjars);
+ optionProcessors.put("libjars", new LibjarsProcessor());
+
+ Option conf = OptionBuilder.withArgName("configuration file").hasArg().withDescription("an application configuration file").create("conf");
+ r.addOption(conf);
+ optionProcessors.put("conf", new ConfFileProcessor());
+
+ // Must come after `conf': this option is of higher priority
+ Option extraConfig = OptionBuilder.withArgName("D").hasArg().withDescription("extra configurations (preserving types)").create("D");
+ r.addOption(extraConfig);
+ optionProcessors.put("D", new ExtraConfigProcessor());
+
+ return r;
+ }
+
+ void parseGeneralOptions(Options opts, Config conf, String[] args) throws ParseException {
+ opts = buildGeneralOptions(opts);
+ CommandLineParser parser = new GnuParser();
+ commandLine = parser.parse(opts, args, true);
+ processGeneralOptions(conf, commandLine);
+ }
+
+ void processGeneralOptions(Config conf, CommandLine commandLine) throws ParseException {
+ for (Map.Entry<String, OptionProcessor> e : optionProcessors.entrySet())
+ if (commandLine.hasOption(e.getKey()))
+ e.getValue().process(conf, commandLine);
+ }
+
+ static List<File> validateFiles(String pathList) throws IOException {
+ List<File> l = new ArrayList<File>();
+
+ for (String s : pathList.split(",")) {
+ File file = new File(s);
+ if (!file.exists())
+ throw new FileNotFoundException("File `" + file.getAbsolutePath() + "' does not exist");
+
+ l.add(file);
+ }
+
+ return l;
+ }
+
+ public static void printGenericCommandUsage(PrintStream out) {
+ String[] strs =
+ new String[] {
+ "Generic options supported are",
+ " -conf <conf.xml> load configurations from",
+ " <conf.xml>",
+ " -conf <conf.yaml> load configurations from",
+ " <conf.yaml>",
+ " -D <key>=<value> set <key> in configuration",
+ " to <value> (preserve value's type)",
+ " -libjars <comma separated list of jars> specify comma separated",
+ " jars to be used by",
+ " the submitted topology", };
+ for (String s : strs)
+ out.println(s);
+ }
+
+ static interface OptionProcessor {
+ public void process(Config conf, CommandLine commandLine) throws ParseException;
+ }
+
+ static class LibjarsProcessor implements OptionProcessor {
+ @Override
+ public void process(Config conf, CommandLine commandLine) throws ParseException {
+ try {
+ List<File> jarFiles = validateFiles(commandLine.getOptionValue("libjars"));
+ Map<String, String> jars = new HashMap<String, String>(jarFiles.size());
+ List<String> names = new ArrayList<String>(jarFiles.size());
+ for (File f : jarFiles) {
+ jars.put(f.getName(), f.getAbsolutePath());
+ names.add(f.getName());
+ }
+ conf.put(TOPOLOGY_LIB_PATH, jars);
+ conf.put(TOPOLOGY_LIB_NAME, names);
+
+ } catch (IOException e) {
+ throw new ParseException(e.getMessage());
+ }
+ }
+ }
+
+ static class ExtraConfigProcessor implements OptionProcessor {
+ static final Yaml yaml = new Yaml();
+
+ @Override
+ public void process(Config conf, CommandLine commandLine) throws ParseException {
+ for (String s : commandLine.getOptionValues("D")) {
+ String[] keyval = s.split("=", 2);
+ if (keyval.length != 2)
+ throw new ParseException("Invalid option value `" + s + "'");
+
+ conf.putAll((Map) yaml.load(keyval[0] + ": " + keyval[1]));
+ }
+ }
+ }
+
+ static class ConfFileProcessor implements OptionProcessor {
+ static final Yaml yaml = new Yaml();
+
+ static Map loadYamlConf(String f) throws IOException {
+ InputStreamReader reader = null;
+ try {
+ FileInputStream fis = new FileInputStream(f);
+ reader = new InputStreamReader(fis, UTF8);
+ return (Map) yaml.load(reader);
+ } finally {
+ if (reader != null)
+ reader.close();
+ }
+ }
+
+ static Map loadConf(String f) throws IOException {
+ if (f.endsWith(".yaml"))
+ return loadYamlConf(f);
+ throw new IOException("Unknown configuration file type: " + f + " does not end with either .yaml");
+ }
+
+ @Override
+ public void process(Config conf, CommandLine commandLine) throws ParseException {
+ try {
+ for (String f : commandLine.getOptionValues("conf")) {
+ Map m = loadConf(f);
+ if (m == null)
+ throw new ParseException("Empty configuration file " + f);
+ conf.putAll(m);
+ }
+ } catch (IOException e) {
+ throw new ParseException(e.getMessage());
+ }
+ }
+ }
+}
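A minimal usage sketch for the parser above (illustrative only: the class name and sample arguments are hypothetical, and it assumes the args array comes straight from a topology's main method):

    import org.apache.commons.cli.ParseException;
    import backtype.storm.Config;
    import backtype.storm.GenericOptionsParser;

    public class ParseArgsExample {
        public static void main(String[] args) throws ParseException {
            // e.g. storm jar app.jar -conf cluster.yaml -D topology.workers=4 my.Topology
            Config conf = new Config();
            GenericOptionsParser parser = new GenericOptionsParser(conf, args);
            // -conf, -D and -libjars have now been applied to conf, later sources
            // overriding earlier ones as described in the class javadoc above.
            String[] remaining = parser.getRemainingArgs();   // e.g. {"my.Topology"}
            System.out.println("workers = " + conf.get("topology.workers"));
            System.out.println("remaining = " + java.util.Arrays.toString(remaining));
        }
    }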
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/ICredentialsListener.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/ICredentialsListener.java b/jstorm-core/src/main/java/backtype/storm/ICredentialsListener.java
new file mode 100755
index 0000000..1a7bc1b
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/ICredentialsListener.java
@@ -0,0 +1,32 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package backtype.storm;
+
+import java.util.Map;
+
+/**
+ * Allows a bolt or a spout to be informed when the credentials of the topology have changed.
+ */
+public interface ICredentialsListener {
+ /**
+ * Called when the credentials of a topology have changed.
+ * @param credentials the new credentials, could be null.
+ */
+ public void setCredentials(Map<String,String> credentials);
+}
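A short sketch of how a component might use this callback (assumes the usual backtype.storm.topology.base.BaseBasicBolt base class is available in this source tree; the credential key name is made up):

    import java.util.Map;
    import backtype.storm.ICredentialsListener;
    import backtype.storm.topology.BasicOutputCollector;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseBasicBolt;
    import backtype.storm.tuple.Tuple;

    public class TokenAwareBolt extends BaseBasicBolt implements ICredentialsListener {
        private volatile String token;

        @Override
        public void setCredentials(Map<String, String> credentials) {
            // credentials may be null, per the interface contract above
            token = (credentials == null) ? null : credentials.get("my.service.token");
        }

        @Override
        public void execute(Tuple input, BasicOutputCollector collector) {
            // use the freshest `token` when calling the external service
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            // this sketch emits nothing
        }
    }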
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/ILocalCluster.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/ILocalCluster.java b/jstorm-core/src/main/java/backtype/storm/ILocalCluster.java
new file mode 100755
index 0000000..7d5aa35
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/ILocalCluster.java
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import backtype.storm.generated.AlreadyAliveException;
+import backtype.storm.generated.ClusterSummary;
+import backtype.storm.generated.InvalidTopologyException;
+import backtype.storm.generated.KillOptions;
+import backtype.storm.generated.SubmitOptions;
+import backtype.storm.generated.NotAliveException;
+import backtype.storm.generated.RebalanceOptions;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.generated.TopologyInfo;
+import backtype.storm.generated.Credentials;
+
+import java.util.Map;
+
+
+public interface ILocalCluster {
+ void submitTopology(String topologyName, Map conf, StormTopology topology) throws AlreadyAliveException, InvalidTopologyException;
+ void submitTopologyWithOpts(String topologyName, Map conf, StormTopology topology, SubmitOptions submitOpts) throws AlreadyAliveException, InvalidTopologyException;
+ void uploadNewCredentials(String topologyName, Credentials creds);
+ void killTopology(String topologyName) throws NotAliveException;
+ void killTopologyWithOpts(String name, KillOptions options) throws NotAliveException;
+ void activate(String topologyName) throws NotAliveException;
+ void deactivate(String topologyName) throws NotAliveException;
+ void rebalance(String name, RebalanceOptions options) throws NotAliveException;
+ void shutdown();
+ String getTopologyConf(String id);
+ StormTopology getTopology(String id);
+ ClusterSummary getClusterInfo();
+ TopologyInfo getTopologyInfo(String id);
+ Map getState();
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/ILocalDRPC.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/ILocalDRPC.java b/jstorm-core/src/main/java/backtype/storm/ILocalDRPC.java
new file mode 100755
index 0000000..e478dca
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/ILocalDRPC.java
@@ -0,0 +1,27 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import backtype.storm.daemon.Shutdownable;
+import backtype.storm.generated.DistributedRPC;
+import backtype.storm.generated.DistributedRPCInvocations;
+
+
+public interface ILocalDRPC extends DistributedRPC.Iface, DistributedRPCInvocations.Iface, Shutdownable {
+ public String getServiceId();
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/LocalCluster.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/LocalCluster.java b/jstorm-core/src/main/java/backtype/storm/LocalCluster.java
new file mode 100755
index 0000000..b55bac4
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/LocalCluster.java
@@ -0,0 +1,275 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import java.util.Map;
+
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import backtype.storm.generated.ClusterSummary;
+import backtype.storm.generated.Credentials;
+import backtype.storm.generated.KillOptions;
+import backtype.storm.generated.NotAliveException;
+import backtype.storm.generated.RebalanceOptions;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.generated.SubmitOptions;
+import backtype.storm.generated.TopologyInfo;
+import backtype.storm.utils.Utils;
+
+import com.alibaba.jstorm.utils.JStormUtils;
+
+public class LocalCluster implements ILocalCluster {
+
+ public static Logger LOG = LoggerFactory.getLogger(LocalCluster.class);
+
+ private LocalClusterMap state;
+
+ protected void setLogger() {
+ // the code is for log4j
+ // boolean needReset = true;
+ // Logger rootLogger = Logger.getRootLogger();
+ // if (rootLogger != null) {
+ // Enumeration appenders = rootLogger.getAllAppenders();
+ // if (appenders.hasMoreElements() == true) {
+ // needReset = false;
+ // }
+ // }
+ //
+ // if (needReset == true) {
+ // BasicConfigurator.configure();
+ // rootLogger.setLevel(Level.INFO);
+ // }
+
+ }
+
+ // keep a static reference so local runs are easy to debug
+ protected static LocalCluster instance = null;
+
+ public static LocalCluster getInstance() {
+ return instance;
+ }
+
+ public LocalCluster() {
+ synchronized (LocalCluster.class) {
+ if (instance != null) {
+ throw new RuntimeException("LocalCluster should be single");
+ }
+ setLogger();
+
+ // work around ZooKeeper's "Address family not supported by protocol
+ // family: connect" error by preferring IPv4
+ System.setProperty("java.net.preferIPv4Stack", "true");
+
+ this.state = LocalUtils.prepareLocalCluster();
+ if (this.state == null)
+ throw new RuntimeException("prepareLocalCluster error");
+
+ instance = this;
+ }
+ }
+
+ @Override
+ public void submitTopology(String topologyName, Map conf, StormTopology topology) {
+ submitTopologyWithOpts(topologyName, conf, topology, null);
+ }
+
+ @Override
+ public void submitTopologyWithOpts(String topologyName, Map conf, StormTopology topology, SubmitOptions submitOpts) {
+ // TODO Auto-generated method stub
+ if (!Utils.isValidConf(conf))
+ throw new RuntimeException("Topology conf is not json-serializable");
+ JStormUtils.setLocalMode(true);
+
+ try {
+ if (submitOpts == null) {
+ state.getNimbus().submitTopology(topologyName, null, Utils.to_json(conf), topology);
+ } else {
+ state.getNimbus().submitTopologyWithOpts(topologyName, null, Utils.to_json(conf), topology, submitOpts);
+ }
+
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("Failed to submit topology " + topologyName, e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void killTopology(String topologyName) {
+ // TODO Auto-generated method stub
+ try {
+ // kill topology quickly
+ KillOptions killOps = new KillOptions();
+ killOps.set_wait_secs(0);
+ state.getNimbus().killTopologyWithOpts(topologyName, killOps);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to kill Topology " + topologyName, e);
+ }
+ }
+
+ @Override
+ public void killTopologyWithOpts(String name, KillOptions options) throws NotAliveException {
+ // TODO Auto-generated method stub
+ try {
+ state.getNimbus().killTopologyWithOpts(name, options);
+ } catch (TException e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to kill Topology " + name, e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void activate(String topologyName) {
+ // TODO Auto-generated method stub
+ try {
+ state.getNimbus().activate(topologyName);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to activate " + topologyName, e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void deactivate(String topologyName) {
+ // TODO Auto-generated method stub
+ try {
+ state.getNimbus().deactivate(topologyName);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to deactivate " + topologyName, e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void rebalance(String name, RebalanceOptions options) {
+ // TODO Auto-generated method stub
+ try {
+ state.getNimbus().rebalance(name, options);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to rebalance " + name, e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void shutdown() {
+ // TODO Auto-generated method stub
+ // to avoid racing with an in-flight kill-topology command,
+ // wait 10 seconds before removing the topology's nodes
+ JStormUtils.sleepMs(10 * 1000);
+ this.state.clean();
+ }
+
+ @Override
+ public String getTopologyConf(String id) {
+ // TODO Auto-generated method stub
+ try {
+ return state.getNimbus().getTopologyConf(id);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to get topology Conf of topologId: " + id, e);
+ }
+ return null;
+ }
+
+ @Override
+ public StormTopology getTopology(String id) {
+ // TODO Auto-generated method stub
+ try {
+ return state.getNimbus().getTopology(id);
+ } catch (NotAliveException e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to get topology of topologId: " + id, e);
+ } catch (TException e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to get topology of topologId: " + id, e);
+ }
+ return null;
+ }
+
+ @Override
+ public ClusterSummary getClusterInfo() {
+ // TODO Auto-generated method stub
+ try {
+ return state.getNimbus().getClusterInfo();
+ } catch (TException e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to get cluster info", e);
+ }
+ return null;
+ }
+
+ @Override
+ public TopologyInfo getTopologyInfo(String id) {
+ // TODO Auto-generated method stub
+ try {
+ return state.getNimbus().getTopologyInfo(id);
+ } catch (NotAliveException e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to get topology info of topologyId: " + id, e);
+ } catch (TException e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to get topology info of topologyId: " + id, e);
+ }
+ return null;
+ }
+
+ /**
+ * Use getLocalClusterMap() instead; this method always returns null.
+ */
+ @Deprecated
+ @Override
+ public Map getState() {
+ // TODO Auto-generated method stub
+ return null;
+ }
+
+ public LocalClusterMap getLocalClusterMap() {
+ return state;
+ }
+
+ public static void main(String[] args) throws Exception {
+ LocalCluster localCluster = null;
+ try {
+ localCluster = new LocalCluster();
+ } finally {
+ if (localCluster != null) {
+ localCluster.shutdown();
+ }
+ }
+ }
+
+ @Override
+ public void uploadNewCredentials(String topologyName, Credentials creds) {
+ // TODO Auto-generated method stub
+ try {
+ state.getNimbus().uploadNewCredentials(topologyName, creds);
+ } catch (Exception e) {
+ // TODO Auto-generated catch block
+ LOG.error("fail to uploadNewCredentials of topologyId: " + topologyName, e);
+ }
+ }
+
+}
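A typical local-mode run of the class above might look like the following sketch (TestWordSpout is assumed to be available under backtype.storm.testing as in upstream Storm, and the sleep is only there to let the topology do some work):

    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.testing.TestWordSpout;
    import backtype.storm.topology.TopologyBuilder;

    public class LocalClusterExample {
        public static void main(String[] args) throws Exception {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("words", new TestWordSpout(), 1);

            Config conf = new Config();
            conf.setNumWorkers(1);

            LocalCluster cluster = new LocalCluster();   // only one instance per JVM
            try {
                cluster.submitTopology("demo", conf, builder.createTopology());
                Thread.sleep(30 * 1000);                 // let the topology run for a while
                cluster.killTopology("demo");
            } finally {
                cluster.shutdown();                      // waits ~10s before cleaning local state
            }
        }
    }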
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/LocalClusterMap.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/LocalClusterMap.java b/jstorm-core/src/main/java/backtype/storm/LocalClusterMap.java
new file mode 100755
index 0000000..bd99c76
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/LocalClusterMap.java
@@ -0,0 +1,126 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.alibaba.jstorm.daemon.nimbus.NimbusServer;
+import com.alibaba.jstorm.daemon.nimbus.ServiceHandler;
+import com.alibaba.jstorm.daemon.supervisor.SupervisorManger;
+import com.alibaba.jstorm.utils.PathUtils;
+import com.alibaba.jstorm.zk.Factory;
+
+public class LocalClusterMap {
+
+ public static Logger LOG = LoggerFactory.getLogger(LocalClusterMap.class);
+
+ private NimbusServer nimbusServer;
+
+ private ServiceHandler nimbus;
+
+ private Factory zookeeper;
+
+ private Map conf;
+
+ private List<String> tmpDir;
+
+ private SupervisorManger supervisor;
+
+ public ServiceHandler getNimbus() {
+ return nimbus;
+ }
+
+ public void setNimbus(ServiceHandler nimbus) {
+ this.nimbus = nimbus;
+ }
+
+ public Factory getZookeeper() {
+ return zookeeper;
+ }
+
+ public void setZookeeper(Factory zookeeper) {
+ this.zookeeper = zookeeper;
+ }
+
+ public Map getConf() {
+ return conf;
+ }
+
+ public void setConf(Map conf) {
+ this.conf = conf;
+ }
+
+ public NimbusServer getNimbusServer() {
+ return nimbusServer;
+ }
+
+ public void setNimbusServer(NimbusServer nimbusServer) {
+ this.nimbusServer = nimbusServer;
+ }
+
+ public SupervisorManger getSupervisor() {
+ return supervisor;
+ }
+
+ public void setSupervisor(SupervisorManger supervisor) {
+ this.supervisor = supervisor;
+ }
+
+ public List<String> getTmpDir() {
+ return tmpDir;
+ }
+
+ public void setTmpDir(List<String> tmpDir) {
+ this.tmpDir = tmpDir;
+ }
+
+ public void clean() {
+
+ if (supervisor != null) {
+ supervisor.ShutdownAllWorkers();
+ supervisor.shutdown();
+ }
+
+ if (nimbusServer != null) {
+ nimbusServer.cleanup();
+ }
+
+ if (zookeeper != null)
+ zookeeper.shutdown();
+
+ // deletion may fail with:
+ // java.io.IOException: Unable to delete file:
+ // {TmpPath}\{UUID}\version-2\log.1
+ if (tmpDir != null) {
+ for (String dir : tmpDir) {
+ try {
+ PathUtils.rmr(dir);
+ } catch (IOException e) {
+ // TODO Auto-generated catch block
+ LOG.error("Fail to delete " + dir);
+ }
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/LocalDRPC.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/LocalDRPC.java b/jstorm-core/src/main/java/backtype/storm/LocalDRPC.java
new file mode 100755
index 0000000..4113bf4
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/LocalDRPC.java
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import backtype.storm.generated.DRPCRequest;
+import backtype.storm.utils.ServiceRegistry;
+
+import com.alibaba.jstorm.drpc.Drpc;
+
+public class LocalDRPC implements ILocalDRPC {
+ private static final Logger LOG = LoggerFactory.getLogger(LocalDRPC.class);
+
+ private Drpc handler = new Drpc();
+ private Thread thread;
+
+ private final String serviceId;
+
+ public LocalDRPC() {
+
+ thread = new Thread(new Runnable() {
+
+ @Override
+ public void run() {
+ LOG.info("Begin to init local Drpc");
+ try {
+ handler.init();
+ } catch (Exception e) {
+ LOG.info("Failed to start local drpc");
+ System.exit(-1);
+ }
+ LOG.info("Successfully start local drpc");
+ }
+ });
+ thread.start();
+
+ serviceId = ServiceRegistry.registerService(handler);
+ }
+
+ @Override
+ public String execute(String functionName, String funcArgs) {
+ // TODO Auto-generated method stub
+ try {
+ return handler.execute(functionName, funcArgs);
+ } catch (Exception e) {
+ LOG.error("", e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void result(String id, String result) throws TException {
+ // TODO Auto-generated method stub
+ handler.result(id, result);
+ }
+
+ @Override
+ public DRPCRequest fetchRequest(String functionName) throws TException {
+ // TODO Auto-generated method stub
+ return handler.fetchRequest(functionName);
+ }
+
+ @Override
+ public void failRequest(String id) throws TException {
+ // TODO Auto-generated method stub
+ handler.failRequest(id);
+ }
+
+ @Override
+ public void shutdown() {
+ // TODO Auto-generated method stub
+ ServiceRegistry.unregisterService(this.serviceId);
+ this.handler.shutdown();
+ }
+
+ @Override
+ public String getServiceId() {
+ // TODO Auto-generated method stub
+ return serviceId;
+ }
+
+}
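For completeness, a sketch of exercising LocalDRPC end to end. This assumes LinearDRPCTopologyBuilder and BaseBasicBolt exist in this tree as they do in upstream Storm; the bolt and DRPC function names are made up:

    import backtype.storm.Config;
    import backtype.storm.LocalCluster;
    import backtype.storm.LocalDRPC;
    import backtype.storm.drpc.LinearDRPCTopologyBuilder;
    import backtype.storm.topology.BasicOutputCollector;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.topology.base.BaseBasicBolt;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.tuple.Values;

    public class LocalDrpcExample {
        // Upper-cases the request argument; field 0 is the DRPC request id and must be forwarded.
        public static class UpperBolt extends BaseBasicBolt {
            @Override
            public void execute(Tuple tuple, BasicOutputCollector collector) {
                collector.emit(new Values(tuple.getValue(0), tuple.getString(1).toUpperCase()));
            }

            @Override
            public void declareOutputFields(OutputFieldsDeclarer declarer) {
                declarer.declare(new Fields("id", "result"));
            }
        }

        public static void main(String[] args) throws Exception {
            LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("upper");
            builder.addBolt(new UpperBolt(), 1);

            LocalDRPC drpc = new LocalDRPC();
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology("drpc-demo", new Config(), builder.createLocalTopology(drpc));

            System.out.println(drpc.execute("upper", "hello"));   // expected: HELLO

            drpc.shutdown();
            cluster.shutdown();
        }
    }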
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/LocalUtils.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/LocalUtils.java b/jstorm-core/src/main/java/backtype/storm/LocalUtils.java
new file mode 100755
index 0000000..e32c07e
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/LocalUtils.java
@@ -0,0 +1,127 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import java.io.File;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.UUID;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import backtype.storm.messaging.IContext;
+import backtype.storm.utils.Utils;
+
+import com.alibaba.jstorm.client.ConfigExtension;
+import com.alibaba.jstorm.daemon.nimbus.DefaultInimbus;
+import com.alibaba.jstorm.daemon.nimbus.NimbusServer;
+import com.alibaba.jstorm.daemon.supervisor.Supervisor;
+import com.alibaba.jstorm.message.netty.NettyContext;
+import com.alibaba.jstorm.zk.Factory;
+import com.alibaba.jstorm.zk.Zookeeper;
+
+public class LocalUtils {
+
+ public static Logger LOG = LoggerFactory.getLogger(LocalUtils.class);
+
+ public static LocalClusterMap prepareLocalCluster() {
+ LocalClusterMap state = new LocalClusterMap();
+ try {
+ List<String> tmpDirs = new ArrayList();
+
+ String zkDir = getTmpDir();
+ tmpDirs.add(zkDir);
+ Factory zookeeper = startLocalZookeeper(zkDir);
+ Map conf = getLocalConf(zookeeper.getZooKeeperServer().getClientPort());
+
+ String nimbusDir = getTmpDir();
+ tmpDirs.add(nimbusDir);
+ Map nimbusConf = deepCopyMap(conf);
+ nimbusConf.put(Config.STORM_LOCAL_DIR, nimbusDir);
+ NimbusServer instance = new NimbusServer();
+
+ Map supervisorConf = deepCopyMap(conf);
+ String supervisorDir = getTmpDir();
+ tmpDirs.add(supervisorDir);
+ supervisorConf.put(Config.STORM_LOCAL_DIR, supervisorDir);
+ Supervisor supervisor = new Supervisor();
+ IContext context = getLocalContext(supervisorConf);
+
+ state.setNimbusServer(instance);
+ state.setNimbus(instance.launcherLocalServer(nimbusConf, new DefaultInimbus()));
+ state.setZookeeper(zookeeper);
+ state.setConf(conf);
+ state.setTmpDir(tmpDirs);
+ state.setSupervisor(supervisor.mkSupervisor(supervisorConf, context));
+ return state;
+ } catch (Exception e) {
+ LOG.error("prepare cluster error!", e);
+ state.clean();
+
+ }
+ return null;
+ }
+
+ private static Factory startLocalZookeeper(String tmpDir) {
+ for (int i = 2000; i < 65535; i++) {
+ try {
+ return Zookeeper.mkInprocessZookeeper(tmpDir, i);
+ } catch (Exception e) {
+ LOG.error("fail to launch zookeeper at port: " + i, e);
+ }
+ }
+ throw new RuntimeException("No port is available to launch an inprocess zookeeper.");
+ }
+
+ private static String getTmpDir() {
+ return System.getProperty("java.io.tmpdir") + File.separator + UUID.randomUUID();
+ }
+
+ private static Map getLocalConf(int port) {
+ List<String> zkServers = new ArrayList<String>(1);
+ zkServers.add("localhost");
+ Map conf = Utils.readStormConfig();
+ conf.put(Config.STORM_CLUSTER_MODE, "local");
+ conf.put(Config.STORM_ZOOKEEPER_SERVERS, zkServers);
+ conf.put(Config.STORM_ZOOKEEPER_PORT, port);
+ conf.put(Config.TOPOLOGY_SKIP_MISSING_KRYO_REGISTRATIONS, true);
+ conf.put(Config.ZMQ_LINGER_MILLIS, 0);
+ conf.put(Config.TOPOLOGY_ENABLE_MESSAGE_TIMEOUTS, false);
+ conf.put(Config.TOPOLOGY_TRIDENT_BATCH_EMIT_INTERVAL_MILLIS, 50);
+ ConfigExtension.setSpoutDelayRunSeconds(conf, 0);
+ ConfigExtension.setTaskCleanupTimeoutSec(conf, 0);
+ return conf;
+ }
+
+ private static IContext getLocalContext(Map conf) {
+ if (!(Boolean) conf.get(Config.STORM_LOCAL_MODE_ZMQ)) {
+ IContext result = new NettyContext();
+ ConfigExtension.setLocalWorkerPort(conf, 6800);
+ result.prepare(conf);
+ return result;
+ }
+ return null;
+ }
+
+ private static Map deepCopyMap(Map map) {
+ return new HashMap(map);
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/StormSubmitter.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/StormSubmitter.java b/jstorm-core/src/main/java/backtype/storm/StormSubmitter.java
new file mode 100644
index 0000000..400875e
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/StormSubmitter.java
@@ -0,0 +1,366 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.thrift.TException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import backtype.storm.generated.AlreadyAliveException;
+import backtype.storm.generated.InvalidTopologyException;
+import backtype.storm.generated.Nimbus;
+import backtype.storm.generated.NotAliveException;
+import backtype.storm.generated.StormTopology;
+import backtype.storm.generated.SubmitOptions;
+import backtype.storm.generated.TopologyAssignException;
+import backtype.storm.utils.BufferFileInputStream;
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Use this class to submit topologies to run on the Storm cluster. You should
+ * run your program with the "storm jar" command from the command-line, and then
+ * use this class to submit your topologies.
+ */
+public class StormSubmitter {
+ public static Logger LOG = LoggerFactory.getLogger(StormSubmitter.class);
+
+ private static Nimbus.Iface localNimbus = null;
+
+ public static void setLocalNimbus(Nimbus.Iface localNimbusHandler) {
+ StormSubmitter.localNimbus = localNimbusHandler;
+ }
+
+ /**
+ * Submits a topology to run on the cluster. A topology runs forever or
+ * until explicitly killed.
+ *
+ *
+ * @param name the name of the storm.
+ * @param stormConf the topology-specific configuration. See {@link Config}.
+ * @param topology the processing to execute.
+ * @throws AlreadyAliveException if a topology with this name is already
+ * running
+ * @throws InvalidTopologyException if an invalid topology was submitted
+ */
+ public static void submitTopology(String name, Map stormConf,
+ StormTopology topology) throws AlreadyAliveException,
+ InvalidTopologyException {
+ submitTopology(name, stormConf, topology, null);
+ }
+
+ public static void submitTopology(String name, Map stormConf,
+ StormTopology topology, SubmitOptions opts, List<File> jarFiles)
+ throws AlreadyAliveException, InvalidTopologyException {
+ if (jarFiles == null) {
+ jarFiles = new ArrayList<File>();
+ }
+ Map<String, String> jars = new HashMap<String, String>(jarFiles.size());
+ List<String> names = new ArrayList<String>(jarFiles.size());
+
+ for (File f : jarFiles) {
+ if (!f.exists()) {
+ LOG.info(f.getName() + " is not existed: "
+ + f.getAbsolutePath());
+ continue;
+ }
+ jars.put(f.getName(), f.getAbsolutePath());
+ names.add(f.getName());
+ }
+ LOG.info("Files: " + names + " will be loaded");
+ stormConf.put(GenericOptionsParser.TOPOLOGY_LIB_PATH, jars);
+ stormConf.put(GenericOptionsParser.TOPOLOGY_LIB_NAME, names);
+ submitTopology(name, stormConf, topology, opts);
+ }
+
+ public static void submitTopology(String name, Map stormConf,
+ StormTopology topology, SubmitOptions opts,
+ ProgressListener listener) throws AlreadyAliveException,
+ InvalidTopologyException {
+ submitTopology(name, stormConf, topology, opts);
+ }
+
+ /**
+ * Submits a topology to run on the cluster. A topology runs forever or
+ * until explicitly killed.
+ *
+ *
+ * @param name the name of the storm.
+ * @param stormConf the topology-specific configuration. See {@link Config}.
+ * @param topology the processing to execute.
+ * @param options to manipulate the starting of the topology
+ * @throws AlreadyAliveException if a topology with this name is already
+ * running
+ * @throws InvalidTopologyException if an invalid topology was submitted
+ */
+ public static void submitTopology(String name, Map stormConf,
+ StormTopology topology, SubmitOptions opts)
+ throws AlreadyAliveException, InvalidTopologyException {
+ if (!Utils.isValidConf(stormConf)) {
+ throw new IllegalArgumentException(
+ "Storm conf is not valid. Must be json-serializable");
+ }
+ stormConf = new HashMap(stormConf);
+ stormConf.putAll(Utils.readCommandLineOpts());
+ Map conf = Utils.readStormConfig();
+ conf.putAll(stormConf);
+ putUserInfo(conf, stormConf);
+ try {
+ String serConf = Utils.to_json(stormConf);
+ if (localNimbus != null) {
+ LOG.info("Submitting topology " + name + " in local mode");
+ localNimbus.submitTopology(name, null, serConf, topology);
+ } else {
+ NimbusClient client = NimbusClient.getConfiguredClient(conf);
+ try {
+ if (topologyNameExists(client, conf, name)) {
+ throw new RuntimeException("Topology with name `" + name
+ + "` already exists on cluster");
+ }
+
+ submitJar(client, conf);
+ LOG.info("Submitting topology " + name
+ + " in distributed mode with conf " + serConf);
+ if (opts != null) {
+ client.getClient().submitTopologyWithOpts(name, path,
+ serConf, topology, opts);
+ } else {
+ // this is for backwards compatibility
+ client.getClient().submitTopology(name, path, serConf,
+ topology);
+ }
+ } finally {
+ client.close();
+ }
+ }
+ LOG.info("Finished submitting topology: " + name);
+ } catch (InvalidTopologyException e) {
+ LOG.warn("Topology submission exception", e);
+ throw e;
+ } catch (AlreadyAliveException e) {
+ LOG.warn("Topology already alive exception", e);
+ throw e;
+ } catch (TopologyAssignException e) {
+ LOG.warn("Failed to assign " + e.get_msg(), e);
+ throw new RuntimeException(e);
+ } catch (TException e) {
+ LOG.warn("Failed to assign ", e);
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Submits a topology to run on the cluster with a progress bar. A topology
+ * runs forever or until explicitly killed.
+ *
+ *
+ * @param name the name of the storm.
+ * @param stormConf the topology-specific configuration. See {@link Config}.
+ * @param topology the processing to execute.
+ * @throws AlreadyAliveException if a topology with this name is already
+ * running
+ * @throws InvalidTopologyException if an invalid topology was submitted
+ * @throws TopologyAssignException
+ */
+
+ public static void submitTopologyWithProgressBar(String name,
+ Map stormConf, StormTopology topology)
+ throws AlreadyAliveException, InvalidTopologyException {
+ submitTopologyWithProgressBar(name, stormConf, topology, null);
+ }
+
+ /**
+ * Submits a topology to run on the cluster with a progress bar. A topology
+ * runs forever or until explicitly killed.
+ *
+ *
+ * @param name the name of the storm.
+ * @param stormConf the topology-specific configuration. See {@link Config}.
+ * @param topology the processing to execute.
+ * @param opts to manipulate the starting of the topology
+ * @throws AlreadyAliveException if a topology with this name is already
+ * running
+ * @throws InvalidTopologyException if an invalid topology was submitted
+ * @throws TopologyAssignException
+ */
+
+ public static void submitTopologyWithProgressBar(String name,
+ Map stormConf, StormTopology topology, SubmitOptions opts)
+ throws AlreadyAliveException, InvalidTopologyException {
+
+ /**
+ * remove progress bar in jstorm
+ */
+ submitTopology(name, stormConf, topology, opts);
+ }
+
+ public static boolean topologyNameExists(NimbusClient client, Map conf, String name) {
+ if (StringUtils.isBlank(name)) {
+ throw new RuntimeException("TopologyName is empty");
+ }
+
+ try {
+ String topologyId = client.getClient().getTopologyId(name);
+ return !StringUtils.isBlank(topologyId);
+
+ } catch (NotAliveException e) {
+ return false;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ private static String submittedJar = null;
+ private static String path = null;
+
+ private static void submitJar(NimbusClient client, Map conf) {
+ if (submittedJar == null) {
+ try {
+ LOG.info("Jar not uploaded to master yet. Submitting jar...");
+ String localJar = System.getProperty("storm.jar");
+ path = client.getClient().beginFileUpload();
+ String[] pathCache = path.split("/");
+ String uploadLocation =
+ path + "/stormjar-" + pathCache[pathCache.length - 1]
+ + ".jar";
+ List<String> lib =
+ (List<String>) conf
+ .get(GenericOptionsParser.TOPOLOGY_LIB_NAME);
+ Map<String, String> libPath =
+ (Map<String, String>) conf
+ .get(GenericOptionsParser.TOPOLOGY_LIB_PATH);
+ if (lib != null && lib.size() != 0) {
+ for (String libName : lib) {
+ String jarPath = path + "/lib/" + libName;
+ client.getClient().beginLibUpload(jarPath);
+ submitJar(conf, libPath.get(libName), jarPath, client);
+ }
+
+ } else {
+ if (localJar == null) {
+ // no lib, no client jar
+ throw new RuntimeException(
+ "No client app jar, please upload it");
+ }
+ }
+
+ if (localJar != null) {
+ submittedJar =
+ submitJar(conf, localJar, uploadLocation, client);
+ } else {
+ // no client jar, but with lib jar
+ client.getClient().finishFileUpload(uploadLocation);
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ } else {
+ LOG.info("Jar already uploaded to master. Not submitting jar.");
+ }
+ }
+
+ public static String submitJar(Map conf, String localJar,
+ String uploadLocation, NimbusClient client) {
+ if (localJar == null) {
+ throw new RuntimeException(
+ "Must submit topologies using the 'storm' client script so that StormSubmitter knows which jar to upload.");
+ }
+
+ try {
+
+ LOG.info("Uploading topology jar " + localJar
+ + " to assigned location: " + uploadLocation);
+ int bufferSize = 512 * 1024;
+ Object maxBufSizeObject =
+ conf.get(Config.NIMBUS_THRIFT_MAX_BUFFER_SIZE);
+ if (maxBufSizeObject != null) {
+ bufferSize = Utils.getInt(maxBufSizeObject) / 2;
+ }
+
+ BufferFileInputStream is =
+ new BufferFileInputStream(localJar, bufferSize);
+ while (true) {
+ byte[] toSubmit = is.read();
+ if (toSubmit.length == 0)
+ break;
+ client.getClient().uploadChunk(uploadLocation,
+ ByteBuffer.wrap(toSubmit));
+ }
+ client.getClient().finishFileUpload(uploadLocation);
+ LOG.info("Successfully uploaded topology jar to assigned location: "
+ + uploadLocation);
+ return uploadLocation;
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ } finally {
+
+ }
+ }
+
+ private static void putUserInfo(Map conf, Map stormConf) {
+ stormConf.put("user.group", conf.get("user.group"));
+ stormConf.put("user.name", conf.get("user.name"));
+ stormConf.put("user.password", conf.get("user.password"));
+ }
+
+ /**
+ * Interface used to track the progress of a file upload
+ */
+ public interface ProgressListener {
+ /**
+ * called before file is uploaded
+ *
+ * @param srcFile - jar file to be uploaded
+ * @param targetFile - destination file
+ * @param totalBytes - total number of bytes of the file
+ */
+ public void onStart(String srcFile, String targetFile, long totalBytes);
+
+ /**
+ * called whenever a chunk of bytes is uploaded
+ *
+ * @param srcFile - jar file to be uploaded
+ * @param targetFile - destination file
+ * @param bytesUploaded - number of bytes transferred so far
+ * @param totalBytes - total number of bytes of the file
+ */
+ public void onProgress(String srcFile, String targetFile,
+ long bytesUploaded, long totalBytes);
+
+ /**
+ * called when the file upload completes
+ *
+ * @param srcFile - jar file to be uploaded
+ * @param targetFile - destination file
+ * @param totalBytes - total number of bytes of the file
+ */
+ public void onCompleted(String srcFile, String targetFile,
+ long totalBytes);
+ }
+}
\ No newline at end of file
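
For reference, a minimal submission sketch of driving the API above from application code. The spout and bolt classes, component ids, and the topology name are placeholders and not part of this patch; the sketch assumes the program is launched through the storm/jstorm client script so that the "storm.jar" system property consumed by submitJar() is set.

    import backtype.storm.Config;
    import backtype.storm.StormSubmitter;
    import backtype.storm.topology.TopologyBuilder;
    import backtype.storm.tuple.Fields;

    public class WordCountSubmitter {
        public static void main(String[] args) throws Exception {
            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("words", new RandomWordSpout(), 2);       // hypothetical spout
            builder.setBolt("count", new WordCountBolt(), 4)           // hypothetical bolt
                   .fieldsGrouping("words", new Fields("word"));

            Config conf = new Config();
            conf.setNumWorkers(2);

            // A null SubmitOptions falls through to the plain submitTopology thrift
            // call (the backwards-compatibility branch above); AlreadyAliveException
            // is thrown if a topology with this name is already running.
            StormSubmitter.submitTopology("word-count", conf, builder.createTopology(), null);
        }
    }
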
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/Tool.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/Tool.java b/jstorm-core/src/main/java/backtype/storm/Tool.java
new file mode 100755
index 0000000..6722b24
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/Tool.java
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+/**
+ * An abstract base class for tools that supports handling of generic command-line options.
+ *
+ * <p>
+ * Here is how a typical <code>Tool</code> is implemented:
+ * </p>
+ * <p>
+ * <blockquote>
+ *
+ * <pre>
+ * public class TopologyApp extends Tool {
+ * {@literal @}Override
+ * public int run(String[] args) throws Exception {
+ * // Config processed by ToolRunner
+ * Config conf = getConf();
+ *
+ * // Other setups go here
+ * String name = "topology";
+ * StormTopology topology = buildTopology(args);
+ * StormSubmitter.submitTopology(name, conf, topology);
+ * return 0;
+ * }
+ *
+ * StormTopology buildTopology(String[] args) { ... }
+ *
+ * public static void main(String[] args) throws Exception {
+ * // Use ToolRunner to handle generic command-line options
+ * ToolRunner.run(new TopologyApp(), args);
+ * }
+ * }
+ * </pre>
+ *
+ * </blockquote>
+ * </p>
+ *
+ * @see GenericOptionsParser
+ * @see ToolRunner
+ */
+
+public abstract class Tool {
+ Config config;
+
+ public abstract int run(String[] args) throws Exception;
+
+ public Config getConf() {
+ return config;
+ }
+
+ public void setConf(Config config) {
+ this.config = config;
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/ToolRunner.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/ToolRunner.java b/jstorm-core/src/main/java/backtype/storm/ToolRunner.java
new file mode 100755
index 0000000..33f5034
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/ToolRunner.java
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.commons.cli.ParseException;
+
+import backtype.storm.utils.Utils;
+
+/**
+ * A utility to help run {@link Tool}s
+ *
+ * <p>
+ * <code>ToolRunner</code> can be used to run classes extending the <code>Tool</code> abstract class. It works in conjunction with {@link GenericOptionsParser} to parse the
+ * <a href="{@docRoot}/backtype/storm/GenericOptionsParser.html#GenericOptions">generic storm command line arguments</a> and modifies the <code>Config</code> of the
+ * <code>Tool</code>. The application-specific options are passed along without being modified.
+ *
+ * @see Tool
+ * @see GenericOptionsParser
+ */
+
+public class ToolRunner {
+ static final Logger LOG = LoggerFactory.getLogger(ToolRunner.class);
+
+ public static void run(Tool tool, String[] args) {
+ run(tool.getConf(), tool, args);
+ }
+
+ public static void run(Config conf, Tool tool, String[] args) {
+ try {
+ if (conf == null) {
+ conf = new Config();
+ conf.putAll(Utils.readStormConfig());
+ }
+
+ GenericOptionsParser parser = new GenericOptionsParser(conf, args);
+ tool.setConf(conf);
+
+ System.exit(tool.run(parser.getRemainingArgs()));
+ } catch (ParseException e) {
+ LOG.error("Error parsing generic options: {}", e.getMessage());
+ GenericOptionsParser.printGenericCommandUsage(System.err);
+ System.exit(2);
+ } catch (Exception e) {
+ LOG.error("Error running tool", e);
+ System.exit(1);
+ }
+ }
+}
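
A small usage sketch under the same assumptions as the Tool javadoc above; the class name is hypothetical. The point it illustrates is that ToolRunner falls back to a Config populated from Utils.readStormConfig() when getConf() returns null, consumes the generic options via GenericOptionsParser, and hands run() only the remaining application arguments.

    import java.util.Arrays;

    import backtype.storm.Config;
    import backtype.storm.Tool;
    import backtype.storm.ToolRunner;

    public class EchoArgsTool extends Tool {
        @Override
        public int run(String[] args) throws Exception {
            // Only application-specific arguments arrive here; generic options have
            // already been parsed into the Config returned by getConf().
            System.out.println("remaining args: " + Arrays.toString(args));
            System.out.println("nimbus host:    " + getConf().get(Config.NIMBUS_HOST));
            return 0;
        }

        public static void main(String[] args) {
            // getConf() is still null at this point, so ToolRunner.run() builds a
            // default Config from Utils.readStormConfig() before calling run();
            // the process exits with run()'s return value.
            ToolRunner.run(new EchoArgsTool(), args);
        }
    }
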
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/clojure/ClojureBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/clojure/ClojureBolt.java b/jstorm-core/src/main/java/backtype/storm/clojure/ClojureBolt.java
new file mode 100755
index 0000000..5de9bde
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/clojure/ClojureBolt.java
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.clojure;
+
+import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
+import backtype.storm.generated.StreamInfo;
+import backtype.storm.task.IBolt;
+import backtype.storm.task.OutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.tuple.Tuple;
+import backtype.storm.utils.Utils;
+import clojure.lang.IFn;
+import clojure.lang.PersistentArrayMap;
+import clojure.lang.Keyword;
+import clojure.lang.Symbol;
+import clojure.lang.RT;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+
+public class ClojureBolt implements IRichBolt, FinishedCallback {
+ Map<String, StreamInfo> _fields;
+ List<String> _fnSpec;
+ List<String> _confSpec;
+ List<Object> _params;
+
+ IBolt _bolt;
+
+ public ClojureBolt(List fnSpec, List confSpec, List<Object> params, Map<String, StreamInfo> fields) {
+ _fnSpec = fnSpec;
+ _confSpec = confSpec;
+ _params = params;
+ _fields = fields;
+ }
+
+ @Override
+ public void prepare(final Map stormConf, final TopologyContext context, final OutputCollector collector) {
+ IFn hof = Utils.loadClojureFn(_fnSpec.get(0), _fnSpec.get(1));
+ try {
+ IFn preparer = (IFn) hof.applyTo(RT.seq(_params));
+ final Map<Keyword,Object> collectorMap = new PersistentArrayMap( new Object[] {
+ Keyword.intern(Symbol.create("output-collector")), collector,
+ Keyword.intern(Symbol.create("context")), context});
+ List<Object> args = new ArrayList<Object>() {{
+ add(stormConf);
+ add(context);
+ add(collectorMap);
+ }};
+
+ _bolt = (IBolt) preparer.applyTo(RT.seq(args));
+ //this is kind of unnecessary for clojure
+ try {
+ _bolt.prepare(stormConf, context, collector);
+ } catch(AbstractMethodError ame) {
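+ // ignored: the Clojure-generated IBolt may not implement prepare(),
+ // so the call above can fail with AbstractMethodError and is safely skipped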
+
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void execute(Tuple input) {
+ _bolt.execute(input);
+ }
+
+ @Override
+ public void cleanup() {
+ try {
+ _bolt.cleanup();
+ } catch(AbstractMethodError ame) {
+
+ }
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ for(String stream: _fields.keySet()) {
+ StreamInfo info = _fields.get(stream);
+ declarer.declareStream(stream, info.is_direct(), new Fields(info.get_output_fields()));
+ }
+ }
+
+ @Override
+ public void finishedId(Object id) {
+ if(_bolt instanceof FinishedCallback) {
+ ((FinishedCallback) _bolt).finishedId(id);
+ }
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ IFn hof = Utils.loadClojureFn(_confSpec.get(0), _confSpec.get(1));
+ try {
+ return (Map) hof.applyTo(RT.seq(_params));
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/clojure/ClojureSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/clojure/ClojureSpout.java b/jstorm-core/src/main/java/backtype/storm/clojure/ClojureSpout.java
new file mode 100755
index 0000000..f6422e3
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/clojure/ClojureSpout.java
@@ -0,0 +1,153 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.clojure;
+
+import backtype.storm.generated.StreamInfo;
+import backtype.storm.spout.ISpout;
+import backtype.storm.spout.SpoutOutputCollector;
+import backtype.storm.task.TopologyContext;
+import backtype.storm.topology.IRichSpout;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import backtype.storm.utils.Utils;
+import clojure.lang.IFn;
+import clojure.lang.PersistentArrayMap;
+import clojure.lang.Keyword;
+import clojure.lang.Symbol;
+import clojure.lang.RT;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+public class ClojureSpout implements IRichSpout {
+ Map<String, StreamInfo> _fields;
+ List<String> _fnSpec;
+ List<String> _confSpec;
+ List<Object> _params;
+
+ ISpout _spout;
+
+ public ClojureSpout(List fnSpec, List confSpec, List<Object> params, Map<String, StreamInfo> fields) {
+ _fnSpec = fnSpec;
+ _confSpec = confSpec;
+ _params = params;
+ _fields = fields;
+ }
+
+
+ @Override
+ public void open(final Map conf, final TopologyContext context, final SpoutOutputCollector collector) {
+ IFn hof = Utils.loadClojureFn(_fnSpec.get(0), _fnSpec.get(1));
+ try {
+ IFn preparer = (IFn) hof.applyTo(RT.seq(_params));
+ final Map<Keyword,Object> collectorMap = new PersistentArrayMap( new Object[] {
+ Keyword.intern(Symbol.create("output-collector")), collector,
+ Keyword.intern(Symbol.create("context")), context});
+ List<Object> args = new ArrayList<Object>() {{
+ add(conf);
+ add(context);
+ add(collectorMap);
+ }};
+
+ _spout = (ISpout) preparer.applyTo(RT.seq(args));
+ //this is kind of unnecessary for clojure
+ try {
+ _spout.open(conf, context, collector);
+ } catch(AbstractMethodError ame) {
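+ // ignored: the Clojure-generated ISpout may not implement open(),
+ // so the call above can fail with AbstractMethodError and is safely skipped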
+
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void close() {
+ try {
+ _spout.close();
+ } catch(AbstractMethodError ame) {
+
+ }
+ }
+
+ @Override
+ public void nextTuple() {
+ try {
+ _spout.nextTuple();
+ } catch(AbstractMethodError ame) {
+
+ }
+
+ }
+
+ @Override
+ public void ack(Object msgId) {
+ try {
+ _spout.ack(msgId);
+ } catch(AbstractMethodError ame) {
+
+ }
+
+ }
+
+ @Override
+ public void fail(Object msgId) {
+ try {
+ _spout.fail(msgId);
+ } catch(AbstractMethodError ame) {
+
+ }
+
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ for(String stream: _fields.keySet()) {
+ StreamInfo info = _fields.get(stream);
+ declarer.declareStream(stream, info.is_direct(), new Fields(info.get_output_fields()));
+ }
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ IFn hof = Utils.loadClojureFn(_confSpec.get(0), _confSpec.get(1));
+ try {
+ return (Map) hof.applyTo(RT.seq(_params));
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ @Override
+ public void activate() {
+ try {
+ _spout.activate();
+ } catch(AbstractMethodError ame) {
+
+ }
+ }
+
+ @Override
+ public void deactivate() {
+ try {
+ _spout.deactivate();
+ } catch(AbstractMethodError ame) {
+
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/clojure/RichShellBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/clojure/RichShellBolt.java b/jstorm-core/src/main/java/backtype/storm/clojure/RichShellBolt.java
new file mode 100755
index 0000000..a155008
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/clojure/RichShellBolt.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.clojure;
+
+import backtype.storm.generated.StreamInfo;
+import backtype.storm.task.ShellBolt;
+import backtype.storm.topology.IRichBolt;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import java.util.Map;
+
+public class RichShellBolt extends ShellBolt implements IRichBolt {
+ private Map<String, StreamInfo> _outputs;
+
+ public RichShellBolt(String[] command, Map<String, StreamInfo> outputs) {
+ super(command);
+ _outputs = outputs;
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ for(String stream: _outputs.keySet()) {
+ StreamInfo def = _outputs.get(stream);
+ if(def.is_direct()) {
+ declarer.declareStream(stream, true, new Fields(def.get_output_fields()));
+ } else {
+ declarer.declareStream(stream, new Fields(def.get_output_fields()));
+ }
+ }
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ return null;
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/clojure/RichShellSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/clojure/RichShellSpout.java b/jstorm-core/src/main/java/backtype/storm/clojure/RichShellSpout.java
new file mode 100755
index 0000000..b49fbef
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/clojure/RichShellSpout.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.clojure;
+
+import backtype.storm.generated.StreamInfo;
+import backtype.storm.spout.ShellSpout;
+import backtype.storm.topology.IRichSpout;
+import backtype.storm.topology.OutputFieldsDeclarer;
+import backtype.storm.tuple.Fields;
+import java.util.Map;
+
+public class RichShellSpout extends ShellSpout implements IRichSpout {
+ private Map<String, StreamInfo> _outputs;
+
+ public RichShellSpout(String[] command, Map<String, StreamInfo> outputs) {
+ super(command);
+ _outputs = outputs;
+ }
+
+ @Override
+ public void declareOutputFields(OutputFieldsDeclarer declarer) {
+ for(String stream: _outputs.keySet()) {
+ StreamInfo def = _outputs.get(stream);
+ if(def.is_direct()) {
+ declarer.declareStream(stream, true, new Fields(def.get_output_fields()));
+ } else {
+ declarer.declareStream(stream, new Fields(def.get_output_fields()));
+ }
+ }
+ }
+
+ @Override
+ public Map<String, Object> getComponentConfiguration() {
+ return null;
+ }
+}
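
A minimal sketch, not part of this patch, of wiring these shell wrappers from Java; the multilang command, stream id, field names, and the upstream spout are placeholders. StreamInfo is the Thrift struct declared per output stream, and its boolean selects a direct versus regular stream, exactly the distinction declareOutputFields() above makes.

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.Map;

    import backtype.storm.clojure.RichShellBolt;
    import backtype.storm.generated.StreamInfo;
    import backtype.storm.topology.TopologyBuilder;

    public class ShellWiringExample {
        public static void main(String[] args) {
            // one non-direct output stream named "default" with a single field "word"
            Map<String, StreamInfo> outputs = new HashMap<String, StreamInfo>();
            outputs.put("default", new StreamInfo(Arrays.asList("word"), false));

            RichShellBolt splitter =
                    new RichShellBolt(new String[] {"python", "splitsentence.py"}, outputs);

            TopologyBuilder builder = new TopologyBuilder();
            builder.setSpout("sentences", new SentenceSpout());     // hypothetical spout
            builder.setBolt("split", splitter, 4).shuffleGrouping("sentences");
        }
    }
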
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/activate.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/activate.java b/jstorm-core/src/main/java/backtype/storm/command/activate.java
new file mode 100755
index 0000000..ed12e09
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/activate.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.security.InvalidParameterException;
+import java.util.Map;
+
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Activate topology
+ *
+ * @author longda
+ *
+ */
+public class activate {
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length == 0) {
+ throw new InvalidParameterException("Should input topology name");
+ }
+
+ String topologyName = args[0];
+
+ NimbusClient client = null;
+ try {
+
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ client.getClient().activate(topologyName);
+
+ System.out.println("Successfully submit command activate " + topologyName);
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/config_value.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/config_value.java b/jstorm-core/src/main/java/backtype/storm/command/config_value.java
new file mode 100755
index 0000000..868ffdc
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/config_value.java
@@ -0,0 +1,50 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.security.InvalidParameterException;
+import java.util.Map;
+
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Get configuration
+ *
+ * @author longda
+ *
+ */
+public class config_value {
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length == 0) {
+ throw new InvalidParameterException("Should input key name");
+ }
+
+ String key = args[0];
+
+ Map conf = Utils.readStormConfig();
+
+ System.out.print("VALUE: " + String.valueOf(conf.get(key)));
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/deactivate.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/deactivate.java b/jstorm-core/src/main/java/backtype/storm/command/deactivate.java
new file mode 100755
index 0000000..22ac20d
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/deactivate.java
@@ -0,0 +1,65 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.security.InvalidParameterException;
+import java.util.Map;
+
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Deactivate topology
+ *
+ * @author longda
+ *
+ */
+public class deactivate {
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length == 0) {
+ throw new InvalidParameterException("Should input topology name");
+ }
+
+ String topologyName = args[0];
+
+ NimbusClient client = null;
+ try {
+
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ client.getClient().deactivate(topologyName);
+
+ System.out.println("Successfully submit command deactivate " + topologyName);
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-core/src/main/java/backtype/storm/command/kill_topology.java
----------------------------------------------------------------------
diff --git a/jstorm-core/src/main/java/backtype/storm/command/kill_topology.java b/jstorm-core/src/main/java/backtype/storm/command/kill_topology.java
new file mode 100755
index 0000000..4ab3893
--- /dev/null
+++ b/jstorm-core/src/main/java/backtype/storm/command/kill_topology.java
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.command;
+
+import java.security.InvalidParameterException;
+import java.util.Map;
+
+import backtype.storm.generated.KillOptions;
+import backtype.storm.utils.NimbusClient;
+import backtype.storm.utils.Utils;
+
+/**
+ * Kill topology
+ *
+ * @author longda
+ *
+ */
+public class kill_topology {
+
+ /**
+ * @param args
+ */
+ public static void main(String[] args) {
+ // TODO Auto-generated method stub
+ if (args == null || args.length == 0) {
+ throw new InvalidParameterException("Should input topology name");
+ }
+
+ String topologyName = args[0];
+
+ NimbusClient client = null;
+ try {
+
+ Map conf = Utils.readStormConfig();
+ client = NimbusClient.getConfiguredClient(conf);
+
+ if (args.length == 1) {
+
+ client.getClient().killTopology(topologyName);
+ } else {
+ int delaySeconds = Integer.parseInt(args[1]);
+
+ KillOptions options = new KillOptions();
+ options.set_wait_secs(delaySeconds);
+
+ client.getClient().killTopologyWithOpts(topologyName, options);
+
+ }
+
+ System.out.println("Successfully submit command kill " + topologyName);
+ } catch (Exception e) {
+ System.out.println(e.getMessage());
+ e.printStackTrace();
+ throw new RuntimeException(e);
+ } finally {
+ if (client != null) {
+ client.close();
+ }
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/StateSpoutSpec.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/StateSpoutSpec.java b/jstorm-client/src/main/java/backtype/storm/generated/StateSpoutSpec.java
deleted file mode 100644
index 4d1baf3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/StateSpoutSpec.java
+++ /dev/null
@@ -1,427 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class StateSpoutSpec implements org.apache.thrift7.TBase<StateSpoutSpec, StateSpoutSpec._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("StateSpoutSpec");
-
- private static final org.apache.thrift7.protocol.TField STATE_SPOUT_OBJECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("state_spout_object", org.apache.thrift7.protocol.TType.STRUCT, (short)1);
- private static final org.apache.thrift7.protocol.TField COMMON_FIELD_DESC = new org.apache.thrift7.protocol.TField("common", org.apache.thrift7.protocol.TType.STRUCT, (short)2);
-
- private ComponentObject state_spout_object; // required
- private ComponentCommon common; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- STATE_SPOUT_OBJECT((short)1, "state_spout_object"),
- COMMON((short)2, "common");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // STATE_SPOUT_OBJECT
- return STATE_SPOUT_OBJECT;
- case 2: // COMMON
- return COMMON;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.STATE_SPOUT_OBJECT, new org.apache.thrift7.meta_data.FieldMetaData("state_spout_object", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ComponentObject.class)));
- tmpMap.put(_Fields.COMMON, new org.apache.thrift7.meta_data.FieldMetaData("common", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, ComponentCommon.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(StateSpoutSpec.class, metaDataMap);
- }
-
- public StateSpoutSpec() {
- }
-
- public StateSpoutSpec(
- ComponentObject state_spout_object,
- ComponentCommon common)
- {
- this();
- this.state_spout_object = state_spout_object;
- this.common = common;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public StateSpoutSpec(StateSpoutSpec other) {
- if (other.is_set_state_spout_object()) {
- this.state_spout_object = new ComponentObject(other.state_spout_object);
- }
- if (other.is_set_common()) {
- this.common = new ComponentCommon(other.common);
- }
- }
-
- public StateSpoutSpec deepCopy() {
- return new StateSpoutSpec(this);
- }
-
- @Override
- public void clear() {
- this.state_spout_object = null;
- this.common = null;
- }
-
- public ComponentObject get_state_spout_object() {
- return this.state_spout_object;
- }
-
- public void set_state_spout_object(ComponentObject state_spout_object) {
- this.state_spout_object = state_spout_object;
- }
-
- public void unset_state_spout_object() {
- this.state_spout_object = null;
- }
-
- /** Returns true if field state_spout_object is set (has been assigned a value) and false otherwise */
- public boolean is_set_state_spout_object() {
- return this.state_spout_object != null;
- }
-
- public void set_state_spout_object_isSet(boolean value) {
- if (!value) {
- this.state_spout_object = null;
- }
- }
-
- public ComponentCommon get_common() {
- return this.common;
- }
-
- public void set_common(ComponentCommon common) {
- this.common = common;
- }
-
- public void unset_common() {
- this.common = null;
- }
-
- /** Returns true if field common is set (has been assigned a value) and false otherwise */
- public boolean is_set_common() {
- return this.common != null;
- }
-
- public void set_common_isSet(boolean value) {
- if (!value) {
- this.common = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case STATE_SPOUT_OBJECT:
- if (value == null) {
- unset_state_spout_object();
- } else {
- set_state_spout_object((ComponentObject)value);
- }
- break;
-
- case COMMON:
- if (value == null) {
- unset_common();
- } else {
- set_common((ComponentCommon)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case STATE_SPOUT_OBJECT:
- return get_state_spout_object();
-
- case COMMON:
- return get_common();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case STATE_SPOUT_OBJECT:
- return is_set_state_spout_object();
- case COMMON:
- return is_set_common();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof StateSpoutSpec)
- return this.equals((StateSpoutSpec)that);
- return false;
- }
-
- public boolean equals(StateSpoutSpec that) {
- if (that == null)
- return false;
-
- boolean this_present_state_spout_object = true && this.is_set_state_spout_object();
- boolean that_present_state_spout_object = true && that.is_set_state_spout_object();
- if (this_present_state_spout_object || that_present_state_spout_object) {
- if (!(this_present_state_spout_object && that_present_state_spout_object))
- return false;
- if (!this.state_spout_object.equals(that.state_spout_object))
- return false;
- }
-
- boolean this_present_common = true && this.is_set_common();
- boolean that_present_common = true && that.is_set_common();
- if (this_present_common || that_present_common) {
- if (!(this_present_common && that_present_common))
- return false;
- if (!this.common.equals(that.common))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_state_spout_object = true && (is_set_state_spout_object());
- builder.append(present_state_spout_object);
- if (present_state_spout_object)
- builder.append(state_spout_object);
-
- boolean present_common = true && (is_set_common());
- builder.append(present_common);
- if (present_common)
- builder.append(common);
-
- return builder.toHashCode();
- }
-
- public int compareTo(StateSpoutSpec other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- StateSpoutSpec typedOther = (StateSpoutSpec)other;
-
- lastComparison = Boolean.valueOf(is_set_state_spout_object()).compareTo(typedOther.is_set_state_spout_object());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_state_spout_object()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.state_spout_object, typedOther.state_spout_object);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_common()).compareTo(typedOther.is_set_common());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_common()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.common, typedOther.common);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // STATE_SPOUT_OBJECT
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.state_spout_object = new ComponentObject();
- this.state_spout_object.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // COMMON
- if (field.type == org.apache.thrift7.protocol.TType.STRUCT) {
- this.common = new ComponentCommon();
- this.common.read(iprot);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.state_spout_object != null) {
- oprot.writeFieldBegin(STATE_SPOUT_OBJECT_FIELD_DESC);
- this.state_spout_object.write(oprot);
- oprot.writeFieldEnd();
- }
- if (this.common != null) {
- oprot.writeFieldBegin(COMMON_FIELD_DESC);
- this.common.write(oprot);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("StateSpoutSpec(");
- boolean first = true;
-
- sb.append("state_spout_object:");
- if (this.state_spout_object == null) {
- sb.append("null");
- } else {
- sb.append(this.state_spout_object);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("common:");
- if (this.common == null) {
- sb.append("null");
- } else {
- sb.append(this.common);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_state_spout_object()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'state_spout_object' is unset! Struct:" + toString());
- }
-
- if (!is_set_common()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'common' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/StormTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/StormTopology.java b/jstorm-client/src/main/java/backtype/storm/generated/StormTopology.java
deleted file mode 100644
index bfe2984..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/StormTopology.java
+++ /dev/null
@@ -1,660 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class StormTopology implements org.apache.thrift7.TBase<StormTopology, StormTopology._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("StormTopology");
-
- private static final org.apache.thrift7.protocol.TField SPOUTS_FIELD_DESC = new org.apache.thrift7.protocol.TField("spouts", org.apache.thrift7.protocol.TType.MAP, (short)1);
- private static final org.apache.thrift7.protocol.TField BOLTS_FIELD_DESC = new org.apache.thrift7.protocol.TField("bolts", org.apache.thrift7.protocol.TType.MAP, (short)2);
- private static final org.apache.thrift7.protocol.TField STATE_SPOUTS_FIELD_DESC = new org.apache.thrift7.protocol.TField("state_spouts", org.apache.thrift7.protocol.TType.MAP, (short)3);
-
- private Map<String,SpoutSpec> spouts; // required
- private Map<String,Bolt> bolts; // required
- private Map<String,StateSpoutSpec> state_spouts; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- SPOUTS((short)1, "spouts"),
- BOLTS((short)2, "bolts"),
- STATE_SPOUTS((short)3, "state_spouts");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // SPOUTS
- return SPOUTS;
- case 2: // BOLTS
- return BOLTS;
- case 3: // STATE_SPOUTS
- return STATE_SPOUTS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.SPOUTS, new org.apache.thrift7.meta_data.FieldMetaData("spouts", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, SpoutSpec.class))));
- tmpMap.put(_Fields.BOLTS, new org.apache.thrift7.meta_data.FieldMetaData("bolts", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, Bolt.class))));
- tmpMap.put(_Fields.STATE_SPOUTS, new org.apache.thrift7.meta_data.FieldMetaData("state_spouts", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.MapMetaData(org.apache.thrift7.protocol.TType.MAP,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING),
- new org.apache.thrift7.meta_data.StructMetaData(org.apache.thrift7.protocol.TType.STRUCT, StateSpoutSpec.class))));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(StormTopology.class, metaDataMap);
- }
-
- public StormTopology() {
- }
-
- public StormTopology(
- Map<String,SpoutSpec> spouts,
- Map<String,Bolt> bolts,
- Map<String,StateSpoutSpec> state_spouts)
- {
- this();
- this.spouts = spouts;
- this.bolts = bolts;
- this.state_spouts = state_spouts;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public StormTopology(StormTopology other) {
- if (other.is_set_spouts()) {
- Map<String,SpoutSpec> __this__spouts = new HashMap<String,SpoutSpec>();
- for (Map.Entry<String, SpoutSpec> other_element : other.spouts.entrySet()) {
-
- String other_element_key = other_element.getKey();
- SpoutSpec other_element_value = other_element.getValue();
-
- String __this__spouts_copy_key = other_element_key;
-
- SpoutSpec __this__spouts_copy_value = new SpoutSpec(other_element_value);
-
- __this__spouts.put(__this__spouts_copy_key, __this__spouts_copy_value);
- }
- this.spouts = __this__spouts;
- }
- if (other.is_set_bolts()) {
- Map<String,Bolt> __this__bolts = new HashMap<String,Bolt>();
- for (Map.Entry<String, Bolt> other_element : other.bolts.entrySet()) {
-
- String other_element_key = other_element.getKey();
- Bolt other_element_value = other_element.getValue();
-
- String __this__bolts_copy_key = other_element_key;
-
- Bolt __this__bolts_copy_value = new Bolt(other_element_value);
-
- __this__bolts.put(__this__bolts_copy_key, __this__bolts_copy_value);
- }
- this.bolts = __this__bolts;
- }
- if (other.is_set_state_spouts()) {
- Map<String,StateSpoutSpec> __this__state_spouts = new HashMap<String,StateSpoutSpec>();
- for (Map.Entry<String, StateSpoutSpec> other_element : other.state_spouts.entrySet()) {
-
- String other_element_key = other_element.getKey();
- StateSpoutSpec other_element_value = other_element.getValue();
-
- String __this__state_spouts_copy_key = other_element_key;
-
- StateSpoutSpec __this__state_spouts_copy_value = new StateSpoutSpec(other_element_value);
-
- __this__state_spouts.put(__this__state_spouts_copy_key, __this__state_spouts_copy_value);
- }
- this.state_spouts = __this__state_spouts;
- }
- }
-
- public StormTopology deepCopy() {
- return new StormTopology(this);
- }
-
- @Override
- public void clear() {
- this.spouts = null;
- this.bolts = null;
- this.state_spouts = null;
- }
-
- public int get_spouts_size() {
- return (this.spouts == null) ? 0 : this.spouts.size();
- }
-
- public void put_to_spouts(String key, SpoutSpec val) {
- if (this.spouts == null) {
- this.spouts = new HashMap<String,SpoutSpec>();
- }
- this.spouts.put(key, val);
- }
-
- public Map<String,SpoutSpec> get_spouts() {
- return this.spouts;
- }
-
- public void set_spouts(Map<String,SpoutSpec> spouts) {
- this.spouts = spouts;
- }
-
- public void unset_spouts() {
- this.spouts = null;
- }
-
- /** Returns true if field spouts is set (has been assigned a value) and false otherwise */
- public boolean is_set_spouts() {
- return this.spouts != null;
- }
-
- public void set_spouts_isSet(boolean value) {
- if (!value) {
- this.spouts = null;
- }
- }
-
- public int get_bolts_size() {
- return (this.bolts == null) ? 0 : this.bolts.size();
- }
-
- public void put_to_bolts(String key, Bolt val) {
- if (this.bolts == null) {
- this.bolts = new HashMap<String,Bolt>();
- }
- this.bolts.put(key, val);
- }
-
- public Map<String,Bolt> get_bolts() {
- return this.bolts;
- }
-
- public void set_bolts(Map<String,Bolt> bolts) {
- this.bolts = bolts;
- }
-
- public void unset_bolts() {
- this.bolts = null;
- }
-
- /** Returns true if field bolts is set (has been assigned a value) and false otherwise */
- public boolean is_set_bolts() {
- return this.bolts != null;
- }
-
- public void set_bolts_isSet(boolean value) {
- if (!value) {
- this.bolts = null;
- }
- }
-
- public int get_state_spouts_size() {
- return (this.state_spouts == null) ? 0 : this.state_spouts.size();
- }
-
- public void put_to_state_spouts(String key, StateSpoutSpec val) {
- if (this.state_spouts == null) {
- this.state_spouts = new HashMap<String,StateSpoutSpec>();
- }
- this.state_spouts.put(key, val);
- }
-
- public Map<String,StateSpoutSpec> get_state_spouts() {
- return this.state_spouts;
- }
-
- public void set_state_spouts(Map<String,StateSpoutSpec> state_spouts) {
- this.state_spouts = state_spouts;
- }
-
- public void unset_state_spouts() {
- this.state_spouts = null;
- }
-
- /** Returns true if field state_spouts is set (has been assigned a value) and false otherwise */
- public boolean is_set_state_spouts() {
- return this.state_spouts != null;
- }
-
- public void set_state_spouts_isSet(boolean value) {
- if (!value) {
- this.state_spouts = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case SPOUTS:
- if (value == null) {
- unset_spouts();
- } else {
- set_spouts((Map<String,SpoutSpec>)value);
- }
- break;
-
- case BOLTS:
- if (value == null) {
- unset_bolts();
- } else {
- set_bolts((Map<String,Bolt>)value);
- }
- break;
-
- case STATE_SPOUTS:
- if (value == null) {
- unset_state_spouts();
- } else {
- set_state_spouts((Map<String,StateSpoutSpec>)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case SPOUTS:
- return get_spouts();
-
- case BOLTS:
- return get_bolts();
-
- case STATE_SPOUTS:
- return get_state_spouts();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case SPOUTS:
- return is_set_spouts();
- case BOLTS:
- return is_set_bolts();
- case STATE_SPOUTS:
- return is_set_state_spouts();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof StormTopology)
- return this.equals((StormTopology)that);
- return false;
- }
-
- public boolean equals(StormTopology that) {
- if (that == null)
- return false;
-
- boolean this_present_spouts = true && this.is_set_spouts();
- boolean that_present_spouts = true && that.is_set_spouts();
- if (this_present_spouts || that_present_spouts) {
- if (!(this_present_spouts && that_present_spouts))
- return false;
- if (!this.spouts.equals(that.spouts))
- return false;
- }
-
- boolean this_present_bolts = true && this.is_set_bolts();
- boolean that_present_bolts = true && that.is_set_bolts();
- if (this_present_bolts || that_present_bolts) {
- if (!(this_present_bolts && that_present_bolts))
- return false;
- if (!this.bolts.equals(that.bolts))
- return false;
- }
-
- boolean this_present_state_spouts = true && this.is_set_state_spouts();
- boolean that_present_state_spouts = true && that.is_set_state_spouts();
- if (this_present_state_spouts || that_present_state_spouts) {
- if (!(this_present_state_spouts && that_present_state_spouts))
- return false;
- if (!this.state_spouts.equals(that.state_spouts))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_spouts = true && (is_set_spouts());
- builder.append(present_spouts);
- if (present_spouts)
- builder.append(spouts);
-
- boolean present_bolts = true && (is_set_bolts());
- builder.append(present_bolts);
- if (present_bolts)
- builder.append(bolts);
-
- boolean present_state_spouts = true && (is_set_state_spouts());
- builder.append(present_state_spouts);
- if (present_state_spouts)
- builder.append(state_spouts);
-
- return builder.toHashCode();
- }
-
- public int compareTo(StormTopology other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- StormTopology typedOther = (StormTopology)other;
-
- lastComparison = Boolean.valueOf(is_set_spouts()).compareTo(typedOther.is_set_spouts());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_spouts()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.spouts, typedOther.spouts);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_bolts()).compareTo(typedOther.is_set_bolts());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_bolts()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.bolts, typedOther.bolts);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_state_spouts()).compareTo(typedOther.is_set_state_spouts());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_state_spouts()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.state_spouts, typedOther.state_spouts);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // SPOUTS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map22 = iprot.readMapBegin();
- this.spouts = new HashMap<String,SpoutSpec>(2*_map22.size);
- for (int _i23 = 0; _i23 < _map22.size; ++_i23)
- {
- String _key24; // required
- SpoutSpec _val25; // required
- _key24 = iprot.readString();
- _val25 = new SpoutSpec();
- _val25.read(iprot);
- this.spouts.put(_key24, _val25);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // BOLTS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map26 = iprot.readMapBegin();
- this.bolts = new HashMap<String,Bolt>(2*_map26.size);
- for (int _i27 = 0; _i27 < _map26.size; ++_i27)
- {
- String _key28; // required
- Bolt _val29; // required
- _key28 = iprot.readString();
- _val29 = new Bolt();
- _val29.read(iprot);
- this.bolts.put(_key28, _val29);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // STATE_SPOUTS
- if (field.type == org.apache.thrift7.protocol.TType.MAP) {
- {
- org.apache.thrift7.protocol.TMap _map30 = iprot.readMapBegin();
- this.state_spouts = new HashMap<String,StateSpoutSpec>(2*_map30.size);
- for (int _i31 = 0; _i31 < _map30.size; ++_i31)
- {
- String _key32; // required
- StateSpoutSpec _val33; // required
- _key32 = iprot.readString();
- _val33 = new StateSpoutSpec();
- _val33.read(iprot);
- this.state_spouts.put(_key32, _val33);
- }
- iprot.readMapEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.spouts != null) {
- oprot.writeFieldBegin(SPOUTS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.STRUCT, this.spouts.size()));
- for (Map.Entry<String, SpoutSpec> _iter34 : this.spouts.entrySet())
- {
- oprot.writeString(_iter34.getKey());
- _iter34.getValue().write(oprot);
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.bolts != null) {
- oprot.writeFieldBegin(BOLTS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.STRUCT, this.bolts.size()));
- for (Map.Entry<String, Bolt> _iter35 : this.bolts.entrySet())
- {
- oprot.writeString(_iter35.getKey());
- _iter35.getValue().write(oprot);
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- if (this.state_spouts != null) {
- oprot.writeFieldBegin(STATE_SPOUTS_FIELD_DESC);
- {
- oprot.writeMapBegin(new org.apache.thrift7.protocol.TMap(org.apache.thrift7.protocol.TType.STRING, org.apache.thrift7.protocol.TType.STRUCT, this.state_spouts.size()));
- for (Map.Entry<String, StateSpoutSpec> _iter36 : this.state_spouts.entrySet())
- {
- oprot.writeString(_iter36.getKey());
- _iter36.getValue().write(oprot);
- }
- oprot.writeMapEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("StormTopology(");
- boolean first = true;
-
- sb.append("spouts:");
- if (this.spouts == null) {
- sb.append("null");
- } else {
- sb.append(this.spouts);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("bolts:");
- if (this.bolts == null) {
- sb.append("null");
- } else {
- sb.append(this.bolts);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("state_spouts:");
- if (this.state_spouts == null) {
- sb.append("null");
- } else {
- sb.append(this.state_spouts);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_spouts()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'spouts' is unset! Struct:" + toString());
- }
-
- if (!is_set_bolts()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'bolts' is unset! Struct:" + toString());
- }
-
- if (!is_set_state_spouts()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'state_spouts' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
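For orientation, a minimal sketch of how the required-field check in the StormTopology diff above behaves; it assumes the usual Thrift-generated no-arg constructor and empty maps of the generated Bolt/SpoutSpec structs, none of which appear in this hunk:

    import backtype.storm.generated.Bolt;
    import backtype.storm.generated.SpoutSpec;
    import backtype.storm.generated.StormTopology;
    import java.util.HashMap;

    public class StormTopologyValidateSketch {
        public static void main(String[] args) {
            StormTopology topo = new StormTopology();             // no-arg constructor assumed
            topo.set_spouts(new HashMap<String, SpoutSpec>());     // an empty map still counts as "set"
            topo.set_bolts(new HashMap<String, Bolt>());
            // state_spouts is left unset, so validate() rejects the struct
            try {
                topo.validate();
            } catch (org.apache.thrift7.TException e) {
                System.out.println(e.getMessage());                // Required field 'state_spouts' is unset! ...
            }
        }
    }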
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/StreamInfo.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/StreamInfo.java b/jstorm-client/src/main/java/backtype/storm/generated/StreamInfo.java
deleted file mode 100644
index 83a23df..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/StreamInfo.java
+++ /dev/null
@@ -1,462 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class StreamInfo implements org.apache.thrift7.TBase<StreamInfo, StreamInfo._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("StreamInfo");
-
- private static final org.apache.thrift7.protocol.TField OUTPUT_FIELDS_FIELD_DESC = new org.apache.thrift7.protocol.TField("output_fields", org.apache.thrift7.protocol.TType.LIST, (short)1);
- private static final org.apache.thrift7.protocol.TField DIRECT_FIELD_DESC = new org.apache.thrift7.protocol.TField("direct", org.apache.thrift7.protocol.TType.BOOL, (short)2);
-
- private List<String> output_fields; // required
- private boolean direct; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- OUTPUT_FIELDS((short)1, "output_fields"),
- DIRECT((short)2, "direct");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // OUTPUT_FIELDS
- return OUTPUT_FIELDS;
- case 2: // DIRECT
- return DIRECT;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __DIRECT_ISSET_ID = 0;
- private BitSet __isset_bit_vector = new BitSet(1);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.OUTPUT_FIELDS, new org.apache.thrift7.meta_data.FieldMetaData("output_fields", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.ListMetaData(org.apache.thrift7.protocol.TType.LIST,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING))));
- tmpMap.put(_Fields.DIRECT, new org.apache.thrift7.meta_data.FieldMetaData("direct", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.BOOL)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(StreamInfo.class, metaDataMap);
- }
-
- public StreamInfo() {
- }
-
- public StreamInfo(
- List<String> output_fields,
- boolean direct)
- {
- this();
- this.output_fields = output_fields;
- this.direct = direct;
- set_direct_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public StreamInfo(StreamInfo other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_output_fields()) {
- List<String> __this__output_fields = new ArrayList<String>();
- for (String other_element : other.output_fields) {
- __this__output_fields.add(other_element);
- }
- this.output_fields = __this__output_fields;
- }
- this.direct = other.direct;
- }
-
- public StreamInfo deepCopy() {
- return new StreamInfo(this);
- }
-
- @Override
- public void clear() {
- this.output_fields = null;
- set_direct_isSet(false);
- this.direct = false;
- }
-
- public int get_output_fields_size() {
- return (this.output_fields == null) ? 0 : this.output_fields.size();
- }
-
- public java.util.Iterator<String> get_output_fields_iterator() {
- return (this.output_fields == null) ? null : this.output_fields.iterator();
- }
-
- public void add_to_output_fields(String elem) {
- if (this.output_fields == null) {
- this.output_fields = new ArrayList<String>();
- }
- this.output_fields.add(elem);
- }
-
- public List<String> get_output_fields() {
- return this.output_fields;
- }
-
- public void set_output_fields(List<String> output_fields) {
- this.output_fields = output_fields;
- }
-
- public void unset_output_fields() {
- this.output_fields = null;
- }
-
- /** Returns true if field output_fields is set (has been assigned a value) and false otherwise */
- public boolean is_set_output_fields() {
- return this.output_fields != null;
- }
-
- public void set_output_fields_isSet(boolean value) {
- if (!value) {
- this.output_fields = null;
- }
- }
-
- public boolean is_direct() {
- return this.direct;
- }
-
- public void set_direct(boolean direct) {
- this.direct = direct;
- set_direct_isSet(true);
- }
-
- public void unset_direct() {
- __isset_bit_vector.clear(__DIRECT_ISSET_ID);
- }
-
- /** Returns true if field direct is set (has been assigned a value) and false otherwise */
- public boolean is_set_direct() {
- return __isset_bit_vector.get(__DIRECT_ISSET_ID);
- }
-
- public void set_direct_isSet(boolean value) {
- __isset_bit_vector.set(__DIRECT_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case OUTPUT_FIELDS:
- if (value == null) {
- unset_output_fields();
- } else {
- set_output_fields((List<String>)value);
- }
- break;
-
- case DIRECT:
- if (value == null) {
- unset_direct();
- } else {
- set_direct((Boolean)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case OUTPUT_FIELDS:
- return get_output_fields();
-
- case DIRECT:
- return Boolean.valueOf(is_direct());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case OUTPUT_FIELDS:
- return is_set_output_fields();
- case DIRECT:
- return is_set_direct();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof StreamInfo)
- return this.equals((StreamInfo)that);
- return false;
- }
-
- public boolean equals(StreamInfo that) {
- if (that == null)
- return false;
-
- boolean this_present_output_fields = true && this.is_set_output_fields();
- boolean that_present_output_fields = true && that.is_set_output_fields();
- if (this_present_output_fields || that_present_output_fields) {
- if (!(this_present_output_fields && that_present_output_fields))
- return false;
- if (!this.output_fields.equals(that.output_fields))
- return false;
- }
-
- boolean this_present_direct = true;
- boolean that_present_direct = true;
- if (this_present_direct || that_present_direct) {
- if (!(this_present_direct && that_present_direct))
- return false;
- if (this.direct != that.direct)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_output_fields = true && (is_set_output_fields());
- builder.append(present_output_fields);
- if (present_output_fields)
- builder.append(output_fields);
-
- boolean present_direct = true;
- builder.append(present_direct);
- if (present_direct)
- builder.append(direct);
-
- return builder.toHashCode();
- }
-
- public int compareTo(StreamInfo other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- StreamInfo typedOther = (StreamInfo)other;
-
- lastComparison = Boolean.valueOf(is_set_output_fields()).compareTo(typedOther.is_set_output_fields());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_output_fields()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.output_fields, typedOther.output_fields);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_direct()).compareTo(typedOther.is_set_direct());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_direct()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.direct, typedOther.direct);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // OUTPUT_FIELDS
- if (field.type == org.apache.thrift7.protocol.TType.LIST) {
- {
- org.apache.thrift7.protocol.TList _list8 = iprot.readListBegin();
- this.output_fields = new ArrayList<String>(_list8.size);
- for (int _i9 = 0; _i9 < _list8.size; ++_i9)
- {
- String _elem10; // required
- _elem10 = iprot.readString();
- this.output_fields.add(_elem10);
- }
- iprot.readListEnd();
- }
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // DIRECT
- if (field.type == org.apache.thrift7.protocol.TType.BOOL) {
- this.direct = iprot.readBool();
- set_direct_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.output_fields != null) {
- oprot.writeFieldBegin(OUTPUT_FIELDS_FIELD_DESC);
- {
- oprot.writeListBegin(new org.apache.thrift7.protocol.TList(org.apache.thrift7.protocol.TType.STRING, this.output_fields.size()));
- for (String _iter11 : this.output_fields)
- {
- oprot.writeString(_iter11);
- }
- oprot.writeListEnd();
- }
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(DIRECT_FIELD_DESC);
- oprot.writeBool(this.direct);
- oprot.writeFieldEnd();
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("StreamInfo(");
- boolean first = true;
-
- sb.append("output_fields:");
- if (this.output_fields == null) {
- sb.append("null");
- } else {
- sb.append(this.output_fields);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("direct:");
- sb.append(this.direct);
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_output_fields()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'output_fields' is unset! Struct:" + toString());
- }
-
- if (!is_set_direct()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'direct' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
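A minimal usage sketch for the StreamInfo struct removed above, relying only on the two-argument constructor and toString() visible in this hunk:

    import backtype.storm.generated.StreamInfo;
    import java.util.Arrays;

    public class StreamInfoSketch {
        public static void main(String[] args) {
            // A non-direct stream that emits two fields.
            StreamInfo info = new StreamInfo(Arrays.asList("word", "count"), false);
            System.out.println(info);   // StreamInfo(output_fields:[word, count], direct:false)
        }
    }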
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/SubmitOptions.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/SubmitOptions.java b/jstorm-client/src/main/java/backtype/storm/generated/SubmitOptions.java
deleted file mode 100644
index 8b4ffc7..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/SubmitOptions.java
+++ /dev/null
@@ -1,340 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SubmitOptions implements org.apache.thrift7.TBase<SubmitOptions, SubmitOptions._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("SubmitOptions");
-
- private static final org.apache.thrift7.protocol.TField INITIAL_STATUS_FIELD_DESC = new org.apache.thrift7.protocol.TField("initial_status", org.apache.thrift7.protocol.TType.I32, (short)1);
-
- private TopologyInitialStatus initial_status; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- /**
- *
- * @see TopologyInitialStatus
- */
- INITIAL_STATUS((short)1, "initial_status");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // INITIAL_STATUS
- return INITIAL_STATUS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.INITIAL_STATUS, new org.apache.thrift7.meta_data.FieldMetaData("initial_status", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.EnumMetaData(org.apache.thrift7.protocol.TType.ENUM, TopologyInitialStatus.class)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(SubmitOptions.class, metaDataMap);
- }
-
- public SubmitOptions() {
- }
-
- public SubmitOptions(
- TopologyInitialStatus initial_status)
- {
- this();
- this.initial_status = initial_status;
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public SubmitOptions(SubmitOptions other) {
- if (other.is_set_initial_status()) {
- this.initial_status = other.initial_status;
- }
- }
-
- public SubmitOptions deepCopy() {
- return new SubmitOptions(this);
- }
-
- @Override
- public void clear() {
- this.initial_status = null;
- }
-
- /**
- *
- * @see TopologyInitialStatus
- */
- public TopologyInitialStatus get_initial_status() {
- return this.initial_status;
- }
-
- /**
- *
- * @see TopologyInitialStatus
- */
- public void set_initial_status(TopologyInitialStatus initial_status) {
- this.initial_status = initial_status;
- }
-
- public void unset_initial_status() {
- this.initial_status = null;
- }
-
- /** Returns true if field initial_status is set (has been assigned a value) and false otherwise */
- public boolean is_set_initial_status() {
- return this.initial_status != null;
- }
-
- public void set_initial_status_isSet(boolean value) {
- if (!value) {
- this.initial_status = null;
- }
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case INITIAL_STATUS:
- if (value == null) {
- unset_initial_status();
- } else {
- set_initial_status((TopologyInitialStatus)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case INITIAL_STATUS:
- return get_initial_status();
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case INITIAL_STATUS:
- return is_set_initial_status();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof SubmitOptions)
- return this.equals((SubmitOptions)that);
- return false;
- }
-
- public boolean equals(SubmitOptions that) {
- if (that == null)
- return false;
-
- boolean this_present_initial_status = true && this.is_set_initial_status();
- boolean that_present_initial_status = true && that.is_set_initial_status();
- if (this_present_initial_status || that_present_initial_status) {
- if (!(this_present_initial_status && that_present_initial_status))
- return false;
- if (!this.initial_status.equals(that.initial_status))
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_initial_status = true && (is_set_initial_status());
- builder.append(present_initial_status);
- if (present_initial_status)
- builder.append(initial_status.getValue());
-
- return builder.toHashCode();
- }
-
- public int compareTo(SubmitOptions other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- SubmitOptions typedOther = (SubmitOptions)other;
-
- lastComparison = Boolean.valueOf(is_set_initial_status()).compareTo(typedOther.is_set_initial_status());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_initial_status()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.initial_status, typedOther.initial_status);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // INITIAL_STATUS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.initial_status = TopologyInitialStatus.findByValue(iprot.readI32());
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.initial_status != null) {
- oprot.writeFieldBegin(INITIAL_STATUS_FIELD_DESC);
- oprot.writeI32(this.initial_status.getValue());
- oprot.writeFieldEnd();
- }
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("SubmitOptions(");
- boolean first = true;
-
- sb.append("initial_status:");
- if (this.initial_status == null) {
- sb.append("null");
- } else {
- sb.append(this.initial_status);
- }
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_initial_status()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'initial_status' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
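A minimal sketch of constructing the SubmitOptions struct removed above; TopologyInitialStatus.ACTIVE is assumed to be one of the generated enum constants (the enum itself is not part of this hunk):

    import backtype.storm.generated.SubmitOptions;
    import backtype.storm.generated.TopologyInitialStatus;

    public class SubmitOptionsSketch {
        public static void main(String[] args) throws org.apache.thrift7.TException {
            SubmitOptions opts = new SubmitOptions(TopologyInitialStatus.ACTIVE);  // ACTIVE assumed to exist
            opts.validate();              // passes: the single required field is set
            System.out.println(opts);     // SubmitOptions(initial_status:ACTIVE)
        }
    }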
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/generated/SupervisorSummary.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/generated/SupervisorSummary.java b/jstorm-client/src/main/java/backtype/storm/generated/SupervisorSummary.java
deleted file mode 100644
index 4f7cb8a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/generated/SupervisorSummary.java
+++ /dev/null
@@ -1,706 +0,0 @@
-/**
- * Autogenerated by Thrift Compiler (0.7.0)
- *
- * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
- */
-package backtype.storm.generated;
-
-import org.apache.commons.lang.builder.HashCodeBuilder;
-import java.util.List;
-import java.util.ArrayList;
-import java.util.Map;
-import java.util.HashMap;
-import java.util.EnumMap;
-import java.util.Set;
-import java.util.HashSet;
-import java.util.EnumSet;
-import java.util.Collections;
-import java.util.BitSet;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class SupervisorSummary implements org.apache.thrift7.TBase<SupervisorSummary, SupervisorSummary._Fields>, java.io.Serializable, Cloneable {
- private static final org.apache.thrift7.protocol.TStruct STRUCT_DESC = new org.apache.thrift7.protocol.TStruct("SupervisorSummary");
-
- private static final org.apache.thrift7.protocol.TField HOST_FIELD_DESC = new org.apache.thrift7.protocol.TField("host", org.apache.thrift7.protocol.TType.STRING, (short)1);
- private static final org.apache.thrift7.protocol.TField SUPERVISOR_ID_FIELD_DESC = new org.apache.thrift7.protocol.TField("supervisor_id", org.apache.thrift7.protocol.TType.STRING, (short)2);
- private static final org.apache.thrift7.protocol.TField UPTIME_SECS_FIELD_DESC = new org.apache.thrift7.protocol.TField("uptime_secs", org.apache.thrift7.protocol.TType.I32, (short)3);
- private static final org.apache.thrift7.protocol.TField NUM_WORKERS_FIELD_DESC = new org.apache.thrift7.protocol.TField("num_workers", org.apache.thrift7.protocol.TType.I32, (short)4);
- private static final org.apache.thrift7.protocol.TField NUM_USED_WORKERS_FIELD_DESC = new org.apache.thrift7.protocol.TField("num_used_workers", org.apache.thrift7.protocol.TType.I32, (short)5);
-
- private String host; // required
- private String supervisor_id; // required
- private int uptime_secs; // required
- private int num_workers; // required
- private int num_used_workers; // required
-
- /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
- public enum _Fields implements org.apache.thrift7.TFieldIdEnum {
- HOST((short)1, "host"),
- SUPERVISOR_ID((short)2, "supervisor_id"),
- UPTIME_SECS((short)3, "uptime_secs"),
- NUM_WORKERS((short)4, "num_workers"),
- NUM_USED_WORKERS((short)5, "num_used_workers");
-
- private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
-
- static {
- for (_Fields field : EnumSet.allOf(_Fields.class)) {
- byName.put(field.getFieldName(), field);
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, or null if its not found.
- */
- public static _Fields findByThriftId(int fieldId) {
- switch(fieldId) {
- case 1: // HOST
- return HOST;
- case 2: // SUPERVISOR_ID
- return SUPERVISOR_ID;
- case 3: // UPTIME_SECS
- return UPTIME_SECS;
- case 4: // NUM_WORKERS
- return NUM_WORKERS;
- case 5: // NUM_USED_WORKERS
- return NUM_USED_WORKERS;
- default:
- return null;
- }
- }
-
- /**
- * Find the _Fields constant that matches fieldId, throwing an exception
- * if it is not found.
- */
- public static _Fields findByThriftIdOrThrow(int fieldId) {
- _Fields fields = findByThriftId(fieldId);
- if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
- return fields;
- }
-
- /**
- * Find the _Fields constant that matches name, or null if its not found.
- */
- public static _Fields findByName(String name) {
- return byName.get(name);
- }
-
- private final short _thriftId;
- private final String _fieldName;
-
- _Fields(short thriftId, String fieldName) {
- _thriftId = thriftId;
- _fieldName = fieldName;
- }
-
- public short getThriftFieldId() {
- return _thriftId;
- }
-
- public String getFieldName() {
- return _fieldName;
- }
- }
-
- // isset id assignments
- private static final int __UPTIME_SECS_ISSET_ID = 0;
- private static final int __NUM_WORKERS_ISSET_ID = 1;
- private static final int __NUM_USED_WORKERS_ISSET_ID = 2;
- private BitSet __isset_bit_vector = new BitSet(3);
-
- public static final Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> metaDataMap;
- static {
- Map<_Fields, org.apache.thrift7.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift7.meta_data.FieldMetaData>(_Fields.class);
- tmpMap.put(_Fields.HOST, new org.apache.thrift7.meta_data.FieldMetaData("host", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.SUPERVISOR_ID, new org.apache.thrift7.meta_data.FieldMetaData("supervisor_id", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.STRING)));
- tmpMap.put(_Fields.UPTIME_SECS, new org.apache.thrift7.meta_data.FieldMetaData("uptime_secs", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.NUM_WORKERS, new org.apache.thrift7.meta_data.FieldMetaData("num_workers", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- tmpMap.put(_Fields.NUM_USED_WORKERS, new org.apache.thrift7.meta_data.FieldMetaData("num_used_workers", org.apache.thrift7.TFieldRequirementType.REQUIRED,
- new org.apache.thrift7.meta_data.FieldValueMetaData(org.apache.thrift7.protocol.TType.I32)));
- metaDataMap = Collections.unmodifiableMap(tmpMap);
- org.apache.thrift7.meta_data.FieldMetaData.addStructMetaDataMap(SupervisorSummary.class, metaDataMap);
- }
-
- public SupervisorSummary() {
- }
-
- public SupervisorSummary(
- String host,
- String supervisor_id,
- int uptime_secs,
- int num_workers,
- int num_used_workers)
- {
- this();
- this.host = host;
- this.supervisor_id = supervisor_id;
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- this.num_workers = num_workers;
- set_num_workers_isSet(true);
- this.num_used_workers = num_used_workers;
- set_num_used_workers_isSet(true);
- }
-
- /**
- * Performs a deep copy on <i>other</i>.
- */
- public SupervisorSummary(SupervisorSummary other) {
- __isset_bit_vector.clear();
- __isset_bit_vector.or(other.__isset_bit_vector);
- if (other.is_set_host()) {
- this.host = other.host;
- }
- if (other.is_set_supervisor_id()) {
- this.supervisor_id = other.supervisor_id;
- }
- this.uptime_secs = other.uptime_secs;
- this.num_workers = other.num_workers;
- this.num_used_workers = other.num_used_workers;
- }
-
- public SupervisorSummary deepCopy() {
- return new SupervisorSummary(this);
- }
-
- @Override
- public void clear() {
- this.host = null;
- this.supervisor_id = null;
- set_uptime_secs_isSet(false);
- this.uptime_secs = 0;
- set_num_workers_isSet(false);
- this.num_workers = 0;
- set_num_used_workers_isSet(false);
- this.num_used_workers = 0;
- }
-
- public String get_host() {
- return this.host;
- }
-
- public void set_host(String host) {
- this.host = host;
- }
-
- public void unset_host() {
- this.host = null;
- }
-
- /** Returns true if field host is set (has been assigned a value) and false otherwise */
- public boolean is_set_host() {
- return this.host != null;
- }
-
- public void set_host_isSet(boolean value) {
- if (!value) {
- this.host = null;
- }
- }
-
- public String get_supervisor_id() {
- return this.supervisor_id;
- }
-
- public void set_supervisor_id(String supervisor_id) {
- this.supervisor_id = supervisor_id;
- }
-
- public void unset_supervisor_id() {
- this.supervisor_id = null;
- }
-
- /** Returns true if field supervisor_id is set (has been assigned a value) and false otherwise */
- public boolean is_set_supervisor_id() {
- return this.supervisor_id != null;
- }
-
- public void set_supervisor_id_isSet(boolean value) {
- if (!value) {
- this.supervisor_id = null;
- }
- }
-
- public int get_uptime_secs() {
- return this.uptime_secs;
- }
-
- public void set_uptime_secs(int uptime_secs) {
- this.uptime_secs = uptime_secs;
- set_uptime_secs_isSet(true);
- }
-
- public void unset_uptime_secs() {
- __isset_bit_vector.clear(__UPTIME_SECS_ISSET_ID);
- }
-
- /** Returns true if field uptime_secs is set (has been assigned a value) and false otherwise */
- public boolean is_set_uptime_secs() {
- return __isset_bit_vector.get(__UPTIME_SECS_ISSET_ID);
- }
-
- public void set_uptime_secs_isSet(boolean value) {
- __isset_bit_vector.set(__UPTIME_SECS_ISSET_ID, value);
- }
-
- public int get_num_workers() {
- return this.num_workers;
- }
-
- public void set_num_workers(int num_workers) {
- this.num_workers = num_workers;
- set_num_workers_isSet(true);
- }
-
- public void unset_num_workers() {
- __isset_bit_vector.clear(__NUM_WORKERS_ISSET_ID);
- }
-
- /** Returns true if field num_workers is set (has been assigned a value) and false otherwise */
- public boolean is_set_num_workers() {
- return __isset_bit_vector.get(__NUM_WORKERS_ISSET_ID);
- }
-
- public void set_num_workers_isSet(boolean value) {
- __isset_bit_vector.set(__NUM_WORKERS_ISSET_ID, value);
- }
-
- public int get_num_used_workers() {
- return this.num_used_workers;
- }
-
- public void set_num_used_workers(int num_used_workers) {
- this.num_used_workers = num_used_workers;
- set_num_used_workers_isSet(true);
- }
-
- public void unset_num_used_workers() {
- __isset_bit_vector.clear(__NUM_USED_WORKERS_ISSET_ID);
- }
-
- /** Returns true if field num_used_workers is set (has been assigned a value) and false otherwise */
- public boolean is_set_num_used_workers() {
- return __isset_bit_vector.get(__NUM_USED_WORKERS_ISSET_ID);
- }
-
- public void set_num_used_workers_isSet(boolean value) {
- __isset_bit_vector.set(__NUM_USED_WORKERS_ISSET_ID, value);
- }
-
- public void setFieldValue(_Fields field, Object value) {
- switch (field) {
- case HOST:
- if (value == null) {
- unset_host();
- } else {
- set_host((String)value);
- }
- break;
-
- case SUPERVISOR_ID:
- if (value == null) {
- unset_supervisor_id();
- } else {
- set_supervisor_id((String)value);
- }
- break;
-
- case UPTIME_SECS:
- if (value == null) {
- unset_uptime_secs();
- } else {
- set_uptime_secs((Integer)value);
- }
- break;
-
- case NUM_WORKERS:
- if (value == null) {
- unset_num_workers();
- } else {
- set_num_workers((Integer)value);
- }
- break;
-
- case NUM_USED_WORKERS:
- if (value == null) {
- unset_num_used_workers();
- } else {
- set_num_used_workers((Integer)value);
- }
- break;
-
- }
- }
-
- public Object getFieldValue(_Fields field) {
- switch (field) {
- case HOST:
- return get_host();
-
- case SUPERVISOR_ID:
- return get_supervisor_id();
-
- case UPTIME_SECS:
- return Integer.valueOf(get_uptime_secs());
-
- case NUM_WORKERS:
- return Integer.valueOf(get_num_workers());
-
- case NUM_USED_WORKERS:
- return Integer.valueOf(get_num_used_workers());
-
- }
- throw new IllegalStateException();
- }
-
- /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
- public boolean isSet(_Fields field) {
- if (field == null) {
- throw new IllegalArgumentException();
- }
-
- switch (field) {
- case HOST:
- return is_set_host();
- case SUPERVISOR_ID:
- return is_set_supervisor_id();
- case UPTIME_SECS:
- return is_set_uptime_secs();
- case NUM_WORKERS:
- return is_set_num_workers();
- case NUM_USED_WORKERS:
- return is_set_num_used_workers();
- }
- throw new IllegalStateException();
- }
-
- @Override
- public boolean equals(Object that) {
- if (that == null)
- return false;
- if (that instanceof SupervisorSummary)
- return this.equals((SupervisorSummary)that);
- return false;
- }
-
- public boolean equals(SupervisorSummary that) {
- if (that == null)
- return false;
-
- boolean this_present_host = true && this.is_set_host();
- boolean that_present_host = true && that.is_set_host();
- if (this_present_host || that_present_host) {
- if (!(this_present_host && that_present_host))
- return false;
- if (!this.host.equals(that.host))
- return false;
- }
-
- boolean this_present_supervisor_id = true && this.is_set_supervisor_id();
- boolean that_present_supervisor_id = true && that.is_set_supervisor_id();
- if (this_present_supervisor_id || that_present_supervisor_id) {
- if (!(this_present_supervisor_id && that_present_supervisor_id))
- return false;
- if (!this.supervisor_id.equals(that.supervisor_id))
- return false;
- }
-
- boolean this_present_uptime_secs = true;
- boolean that_present_uptime_secs = true;
- if (this_present_uptime_secs || that_present_uptime_secs) {
- if (!(this_present_uptime_secs && that_present_uptime_secs))
- return false;
- if (this.uptime_secs != that.uptime_secs)
- return false;
- }
-
- boolean this_present_num_workers = true;
- boolean that_present_num_workers = true;
- if (this_present_num_workers || that_present_num_workers) {
- if (!(this_present_num_workers && that_present_num_workers))
- return false;
- if (this.num_workers != that.num_workers)
- return false;
- }
-
- boolean this_present_num_used_workers = true;
- boolean that_present_num_used_workers = true;
- if (this_present_num_used_workers || that_present_num_used_workers) {
- if (!(this_present_num_used_workers && that_present_num_used_workers))
- return false;
- if (this.num_used_workers != that.num_used_workers)
- return false;
- }
-
- return true;
- }
-
- @Override
- public int hashCode() {
- HashCodeBuilder builder = new HashCodeBuilder();
-
- boolean present_host = true && (is_set_host());
- builder.append(present_host);
- if (present_host)
- builder.append(host);
-
- boolean present_supervisor_id = true && (is_set_supervisor_id());
- builder.append(present_supervisor_id);
- if (present_supervisor_id)
- builder.append(supervisor_id);
-
- boolean present_uptime_secs = true;
- builder.append(present_uptime_secs);
- if (present_uptime_secs)
- builder.append(uptime_secs);
-
- boolean present_num_workers = true;
- builder.append(present_num_workers);
- if (present_num_workers)
- builder.append(num_workers);
-
- boolean present_num_used_workers = true;
- builder.append(present_num_used_workers);
- if (present_num_used_workers)
- builder.append(num_used_workers);
-
- return builder.toHashCode();
- }
-
- public int compareTo(SupervisorSummary other) {
- if (!getClass().equals(other.getClass())) {
- return getClass().getName().compareTo(other.getClass().getName());
- }
-
- int lastComparison = 0;
- SupervisorSummary typedOther = (SupervisorSummary)other;
-
- lastComparison = Boolean.valueOf(is_set_host()).compareTo(typedOther.is_set_host());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_host()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.host, typedOther.host);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_supervisor_id()).compareTo(typedOther.is_set_supervisor_id());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_supervisor_id()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.supervisor_id, typedOther.supervisor_id);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_uptime_secs()).compareTo(typedOther.is_set_uptime_secs());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_uptime_secs()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.uptime_secs, typedOther.uptime_secs);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_num_workers()).compareTo(typedOther.is_set_num_workers());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_num_workers()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.num_workers, typedOther.num_workers);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- lastComparison = Boolean.valueOf(is_set_num_used_workers()).compareTo(typedOther.is_set_num_used_workers());
- if (lastComparison != 0) {
- return lastComparison;
- }
- if (is_set_num_used_workers()) {
- lastComparison = org.apache.thrift7.TBaseHelper.compareTo(this.num_used_workers, typedOther.num_used_workers);
- if (lastComparison != 0) {
- return lastComparison;
- }
- }
- return 0;
- }
-
- public _Fields fieldForId(int fieldId) {
- return _Fields.findByThriftId(fieldId);
- }
-
- public void read(org.apache.thrift7.protocol.TProtocol iprot) throws org.apache.thrift7.TException {
- org.apache.thrift7.protocol.TField field;
- iprot.readStructBegin();
- while (true)
- {
- field = iprot.readFieldBegin();
- if (field.type == org.apache.thrift7.protocol.TType.STOP) {
- break;
- }
- switch (field.id) {
- case 1: // HOST
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.host = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 2: // SUPERVISOR_ID
- if (field.type == org.apache.thrift7.protocol.TType.STRING) {
- this.supervisor_id = iprot.readString();
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 3: // UPTIME_SECS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.uptime_secs = iprot.readI32();
- set_uptime_secs_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 4: // NUM_WORKERS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.num_workers = iprot.readI32();
- set_num_workers_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- case 5: // NUM_USED_WORKERS
- if (field.type == org.apache.thrift7.protocol.TType.I32) {
- this.num_used_workers = iprot.readI32();
- set_num_used_workers_isSet(true);
- } else {
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- break;
- default:
- org.apache.thrift7.protocol.TProtocolUtil.skip(iprot, field.type);
- }
- iprot.readFieldEnd();
- }
- iprot.readStructEnd();
- validate();
- }
-
- public void write(org.apache.thrift7.protocol.TProtocol oprot) throws org.apache.thrift7.TException {
- validate();
-
- oprot.writeStructBegin(STRUCT_DESC);
- if (this.host != null) {
- oprot.writeFieldBegin(HOST_FIELD_DESC);
- oprot.writeString(this.host);
- oprot.writeFieldEnd();
- }
- if (this.supervisor_id != null) {
- oprot.writeFieldBegin(SUPERVISOR_ID_FIELD_DESC);
- oprot.writeString(this.supervisor_id);
- oprot.writeFieldEnd();
- }
- oprot.writeFieldBegin(UPTIME_SECS_FIELD_DESC);
- oprot.writeI32(this.uptime_secs);
- oprot.writeFieldEnd();
- oprot.writeFieldBegin(NUM_WORKERS_FIELD_DESC);
- oprot.writeI32(this.num_workers);
- oprot.writeFieldEnd();
- oprot.writeFieldBegin(NUM_USED_WORKERS_FIELD_DESC);
- oprot.writeI32(this.num_used_workers);
- oprot.writeFieldEnd();
- oprot.writeFieldStop();
- oprot.writeStructEnd();
- }
-
- @Override
- public String toString() {
- StringBuilder sb = new StringBuilder("SupervisorSummary(");
- boolean first = true;
-
- sb.append("host:");
- if (this.host == null) {
- sb.append("null");
- } else {
- sb.append(this.host);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("supervisor_id:");
- if (this.supervisor_id == null) {
- sb.append("null");
- } else {
- sb.append(this.supervisor_id);
- }
- first = false;
- if (!first) sb.append(", ");
- sb.append("uptime_secs:");
- sb.append(this.uptime_secs);
- first = false;
- if (!first) sb.append(", ");
- sb.append("num_workers:");
- sb.append(this.num_workers);
- first = false;
- if (!first) sb.append(", ");
- sb.append("num_used_workers:");
- sb.append(this.num_used_workers);
- first = false;
- sb.append(")");
- return sb.toString();
- }
-
- public void validate() throws org.apache.thrift7.TException {
- // check for required fields
- if (!is_set_host()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'host' is unset! Struct:" + toString());
- }
-
- if (!is_set_supervisor_id()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'supervisor_id' is unset! Struct:" + toString());
- }
-
- if (!is_set_uptime_secs()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'uptime_secs' is unset! Struct:" + toString());
- }
-
- if (!is_set_num_workers()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'num_workers' is unset! Struct:" + toString());
- }
-
- if (!is_set_num_used_workers()) {
- throw new org.apache.thrift7.protocol.TProtocolException("Required field 'num_used_workers' is unset! Struct:" + toString());
- }
-
- }
-
- private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
- try {
- write(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(out)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
- private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
- try {
- // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
- __isset_bit_vector = new BitSet(1);
- read(new org.apache.thrift7.protocol.TCompactProtocol(new org.apache.thrift7.transport.TIOStreamTransport(in)));
- } catch (org.apache.thrift7.TException te) {
- throw new java.io.IOException(te);
- }
- }
-
-}
-
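A minimal sketch of the five-argument SupervisorSummary constructor and getters shown above; the host and supervisor id values are made up for illustration:

    import backtype.storm.generated.SupervisorSummary;

    public class SupervisorSummarySketch {
        public static void main(String[] args) throws org.apache.thrift7.TException {
            // host, supervisor id, uptime in seconds, total worker slots, used worker slots
            SupervisorSummary s = new SupervisorSummary("10.0.0.5", "sup-1", 3600, 4, 2);
            s.validate();                                                        // all five required fields are set
            System.out.println(s.get_num_workers() - s.get_num_used_workers());  // 2 free slots
        }
    }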
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/TridentTopology.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/TridentTopology.java b/jstorm-client/src/main/java/storm/trident/TridentTopology.java
deleted file mode 100644
index 7b4b00d..0000000
--- a/jstorm-client/src/main/java/storm/trident/TridentTopology.java
+++ /dev/null
@@ -1,796 +0,0 @@
-package storm.trident;
-
-import backtype.storm.Config;
-import backtype.storm.ILocalDRPC;
-import backtype.storm.drpc.DRPCSpout;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
-import org.jgrapht.DirectedGraph;
-import org.jgrapht.UndirectedGraph;
-import org.jgrapht.alg.ConnectivityInspector;
-import org.jgrapht.graph.DefaultDirectedGraph;
-import org.jgrapht.graph.Pseudograph;
-import storm.trident.drpc.ReturnResultsReducer;
-import storm.trident.fluent.GroupedStream;
-import storm.trident.fluent.IAggregatableStream;
-import storm.trident.fluent.UniqueIdGen;
-import storm.trident.graph.GraphGrouper;
-import storm.trident.graph.Group;
-import storm.trident.operation.GroupedMultiReducer;
-import storm.trident.operation.MultiReducer;
-import storm.trident.operation.impl.FilterExecutor;
-import storm.trident.operation.impl.GroupedMultiReducerExecutor;
-import storm.trident.operation.impl.IdentityMultiReducer;
-import storm.trident.operation.impl.JoinerMultiReducer;
-import storm.trident.operation.impl.TrueFilter;
-import storm.trident.partition.IdentityGrouping;
-import storm.trident.planner.Node;
-import storm.trident.planner.NodeStateInfo;
-import storm.trident.planner.PartitionNode;
-import storm.trident.planner.ProcessorNode;
-import storm.trident.planner.SpoutNode;
-import storm.trident.planner.SubtopologyBolt;
-import storm.trident.planner.processor.EachProcessor;
-import storm.trident.planner.processor.MultiReducerProcessor;
-import storm.trident.spout.BatchSpoutExecutor;
-import storm.trident.spout.IBatchSpout;
-import storm.trident.spout.IOpaquePartitionedTridentSpout;
-import storm.trident.spout.IPartitionedTridentSpout;
-import storm.trident.spout.ITridentSpout;
-import storm.trident.spout.OpaquePartitionedTridentSpoutExecutor;
-import storm.trident.spout.PartitionedTridentSpoutExecutor;
-import storm.trident.spout.RichSpoutBatchExecutor;
-import storm.trident.state.StateFactory;
-import storm.trident.state.StateSpec;
-import storm.trident.topology.TridentTopologyBuilder;
-import storm.trident.util.ErrorEdgeFactory;
-import storm.trident.util.IndexedEdge;
-import storm.trident.util.TridentUtils;
-
-
-// graph with 3 kinds of nodes:
-// operation, partition, or spout
-// all operations have finishBatch and can optionally be committers
-public class TridentTopology {
-
- //TODO: add a method for the DRPC stream; it needs to know how to automatically do return-results, etc.
- // is it too expensive to do a batch per DRPC request?
-
- DefaultDirectedGraph<Node, IndexedEdge> _graph;
- Map<String, List<Node>> _colocate = new HashMap();
- UniqueIdGen _gen;
-
- public TridentTopology() {
- _graph = new DefaultDirectedGraph(new ErrorEdgeFactory());
- _gen = new UniqueIdGen();
- }
-
- private TridentTopology(DefaultDirectedGraph<Node, IndexedEdge> graph, Map<String, List<Node>> colocate, UniqueIdGen gen) {
- _graph = graph;
- _colocate = colocate;
- _gen = gen;
- }
-
-
- // automatically turn it into a batch spout; this should take parameters controlling how much to batch
-// public Stream newStream(IRichSpout spout) {
-// Node n = new SpoutNode(getUniqueStreamId(), TridentUtils.getSingleOutputStreamFields(spout), null, spout, SpoutNode.SpoutType.BATCH);
-// return addNode(n);
-// }
-
- public Stream newStream(String txId, IRichSpout spout) {
- return newStream(txId, new RichSpoutBatchExecutor(spout));
- }
-
- public Stream newStream(String txId, IBatchSpout spout) {
- Node n = new SpoutNode(getUniqueStreamId(), spout.getOutputFields(), txId, spout, SpoutNode.SpoutType.BATCH);
- return addNode(n);
- }
-
- public Stream newStream(String txId, ITridentSpout spout) {
- Node n = new SpoutNode(getUniqueStreamId(), spout.getOutputFields(), txId, spout, SpoutNode.SpoutType.BATCH);
- return addNode(n);
- }
-
- public Stream newStream(String txId, IPartitionedTridentSpout spout) {
- return newStream(txId, new PartitionedTridentSpoutExecutor(spout));
- }
-
- public Stream newStream(String txId, IOpaquePartitionedTridentSpout spout) {
- return newStream(txId, new OpaquePartitionedTridentSpoutExecutor(spout));
- }
-
- public Stream newDRPCStream(String function) {
- return newDRPCStream(new DRPCSpout(function));
- }
-
- public Stream newDRPCStream(String function, ILocalDRPC server) {
- DRPCSpout spout;
- if(server==null) {
- spout = new DRPCSpout(function);
- } else {
- spout = new DRPCSpout(function, server);
- }
- return newDRPCStream(spout);
- }
-
- private Stream newDRPCStream(DRPCSpout spout) {
- // TODO: consider adding a shuffle grouping after the spout to avoid so much routing of the args/return-info all over the place
- // (at least until it's possible to just pack bolt logic into the spout itself)
-
- Node n = new SpoutNode(getUniqueStreamId(), TridentUtils.getSingleOutputStreamFields(spout), null, spout, SpoutNode.SpoutType.DRPC);
- Stream nextStream = addNode(n);
- // later on, this will be joined back with return-info and all the results
- return nextStream.project(new Fields("args"));
- }
-
- public TridentState newStaticState(StateFactory factory) {
- return newStaticState(new StateSpec(factory));
- }
-
- public TridentState newStaticState(StateSpec spec) {
- String stateId = getUniqueStateId();
- Node n = new Node(getUniqueStreamId(), null, new Fields());
- n.stateInfo = new NodeStateInfo(stateId, spec);
- registerNode(n);
- return new TridentState(this, n);
- }
-
- public Stream multiReduce(Stream s1, Stream s2, MultiReducer function, Fields outputFields) {
- return multiReduce(Arrays.asList(s1, s2), function, outputFields);
- }
-
- public Stream multiReduce(Fields inputFields1, Stream s1, Fields inputFields2, Stream s2, MultiReducer function, Fields outputFields) {
- return multiReduce(Arrays.asList(inputFields1, inputFields2), Arrays.asList(s1, s2), function, outputFields);
- }
-
- public Stream multiReduce(GroupedStream s1, GroupedStream s2, GroupedMultiReducer function, Fields outputFields) {
- return multiReduce(Arrays.asList(s1, s2), function, outputFields);
- }
-
- public Stream multiReduce(Fields inputFields1, GroupedStream s1, Fields inputFields2, GroupedStream s2, GroupedMultiReducer function, Fields outputFields) {
- return multiReduce(Arrays.asList(inputFields1, inputFields2), Arrays.asList(s1, s2), function, outputFields);
- }
-
- public Stream multiReduce(List<Stream> streams, MultiReducer function, Fields outputFields) {
- return multiReduce(getAllOutputFields(streams), streams, function, outputFields);
- }
-
- public Stream multiReduce(List<GroupedStream> streams, GroupedMultiReducer function, Fields outputFields) {
- return multiReduce(getAllOutputFields(streams), streams, function, outputFields);
- }
-
- public Stream multiReduce(List<Fields> inputFields, List<Stream> streams, MultiReducer function, Fields outputFields) {
- List<String> names = new ArrayList<String>();
- for(Stream s: streams) {
- if(s._name!=null) {
- names.add(s._name);
- }
- }
- Node n = new ProcessorNode(getUniqueStreamId(), Utils.join(names, "-"), outputFields, outputFields, new MultiReducerProcessor(inputFields, function));
- return addSourcedNode(streams, n);
- }
-
- public Stream multiReduce(List<Fields> inputFields, List<GroupedStream> groupedStreams, GroupedMultiReducer function, Fields outputFields) {
- List<Fields> fullInputFields = new ArrayList<Fields>();
- List<Stream> streams = new ArrayList<Stream>();
- List<Fields> fullGroupFields = new ArrayList<Fields>();
- for(int i=0; i<groupedStreams.size(); i++) {
- GroupedStream gs = groupedStreams.get(i);
- Fields groupFields = gs.getGroupFields();
- fullGroupFields.add(groupFields);
- streams.add(gs.toStream().partitionBy(groupFields));
- fullInputFields.add(TridentUtils.fieldsUnion(groupFields, inputFields.get(i)));
-
- }
- return multiReduce(fullInputFields, streams, new GroupedMultiReducerExecutor(function, fullGroupFields, inputFields), outputFields);
- }
-
- public Stream merge(Fields outputFields, Stream... streams) {
- return merge(outputFields, Arrays.asList(streams));
- }
-
- public Stream merge(Fields outputFields, List<Stream> streams) {
- return multiReduce(streams, new IdentityMultiReducer(), outputFields);
- }
-
- public Stream merge(Stream... streams) {
- return merge(Arrays.asList(streams));
- }
-
- public Stream merge(List<Stream> streams) {
- return merge(streams.get(0).getOutputFields(), streams);
- }
-
- public Stream join(Stream s1, Fields joinFields1, Stream s2, Fields joinFields2, Fields outFields) {
- return join(Arrays.asList(s1, s2), Arrays.asList(joinFields1, joinFields2), outFields);
- }
-
- public Stream join(List<Stream> streams, List<Fields> joinFields, Fields outFields) {
- return join(streams, joinFields, outFields, JoinType.INNER);
- }
-
- public Stream join(Stream s1, Fields joinFields1, Stream s2, Fields joinFields2, Fields outFields, JoinType type) {
- return join(Arrays.asList(s1, s2), Arrays.asList(joinFields1, joinFields2), outFields, type);
- }
-
- public Stream join(List<Stream> streams, List<Fields> joinFields, Fields outFields, JoinType type) {
- return join(streams, joinFields, outFields, repeat(streams.size(), type));
- }
-
- public Stream join(Stream s1, Fields joinFields1, Stream s2, Fields joinFields2, Fields outFields, List<JoinType> mixed) {
- return join(Arrays.asList(s1, s2), Arrays.asList(joinFields1, joinFields2), outFields, mixed);
-
- }
-
- public Stream join(List<Stream> streams, List<Fields> joinFields, Fields outFields, List<JoinType> mixed) {
- return multiReduce(strippedInputFields(streams, joinFields),
- groupedStreams(streams, joinFields),
- new JoinerMultiReducer(mixed, joinFields.get(0).size(), strippedInputFields(streams, joinFields)),
- outFields);
- }
-
- public StormTopology build() {
- DefaultDirectedGraph<Node, IndexedEdge> graph = (DefaultDirectedGraph) _graph.clone();
-
-
- completeDRPC(graph, _colocate, _gen);
-
- List<SpoutNode> spoutNodes = new ArrayList<SpoutNode>();
-
- // can be regular nodes (static state) or processor nodes
- Set<Node> boltNodes = new HashSet<Node>();
- for(Node n: graph.vertexSet()) {
- if(n instanceof SpoutNode) {
- spoutNodes.add((SpoutNode) n);
- } else if(!(n instanceof PartitionNode)) {
- boltNodes.add(n);
- }
- }
-
-
- Set<Group> initialGroups = new HashSet<Group>();
- for(List<Node> colocate: _colocate.values()) {
- Group g = new Group(graph, colocate);
- boltNodes.removeAll(colocate);
- initialGroups.add(g);
- }
- for(Node n: boltNodes) {
- initialGroups.add(new Group(graph, n));
- }
-
-
- GraphGrouper grouper = new GraphGrouper(graph, initialGroups);
- grouper.mergeFully();
- Collection<Group> mergedGroups = grouper.getAllGroups();
-
-
-
- // add identity partitions between groups
- for(IndexedEdge<Node> e: new HashSet<IndexedEdge>(graph.edgeSet())) {
- if(!(e.source instanceof PartitionNode) && !(e.target instanceof PartitionNode)) {
- Group g1 = grouper.nodeGroup(e.source);
- Group g2 = grouper.nodeGroup(e.target);
- // g1 being null means the source is a spout node
- if(g1==null && !(e.source instanceof SpoutNode))
- throw new RuntimeException("Planner exception: Null source group must indicate a spout node at this phase of planning");
- if(g1==null || !g1.equals(g2)) {
- graph.removeEdge(e);
- PartitionNode pNode = makeIdentityPartition(e.source);
- graph.addVertex(pNode);
- graph.addEdge(e.source, pNode, new IndexedEdge(e.source, pNode, 0));
- graph.addEdge(pNode, e.target, new IndexedEdge(pNode, e.target, e.index));
- }
- }
- }
- // if one group subscribes to the same stream with the same partitioning multiple times,
- // merge those subscriptions together (otherwise many output streams can end up being created for that
- // partitioning when the input has to be split into multiple output streams because the same input
- // reaches the group with different partitionings)
-
- // this is necessary because splitting logic can't currently be merged into a spout
- // not the most kosher algorithm here, since the grouper indexes are being trounced by adding nodes to
- // arbitrary groups, but it works out
- List<Node> forNewGroups = new ArrayList<Node>();
- for(Group g: mergedGroups) {
- for(PartitionNode n: extraPartitionInputs(g)) {
- Node idNode = makeIdentityNode(n.allOutputFields);
- Node newPartitionNode = new PartitionNode(idNode.streamId, n.name, idNode.allOutputFields, n.thriftGrouping);
- Node parentNode = TridentUtils.getParent(graph, n);
- Set<IndexedEdge> outgoing = graph.outgoingEdgesOf(n);
- graph.removeVertex(n);
-
- graph.addVertex(idNode);
- graph.addVertex(newPartitionNode);
- addEdge(graph, parentNode, idNode, 0);
- addEdge(graph, idNode, newPartitionNode, 0);
- for(IndexedEdge e: outgoing) {
- addEdge(graph, newPartitionNode, e.target, e.index);
- }
- Group parentGroup = grouper.nodeGroup(parentNode);
- if(parentGroup==null) {
- forNewGroups.add(idNode);
- } else {
- parentGroup.nodes.add(idNode);
- }
- }
- }
- // TODO: in the future, we want a way to include this logic in the spout itself,
- // or make it unnecessary by having Storm include metadata about which grouping a tuple
- // came from
-
- for(Node n: forNewGroups) {
- grouper.addGroup(new Group(graph, n));
- }
-
- // add in spouts as groups so we can get parallelisms
- for(Node n: spoutNodes) {
- grouper.addGroup(new Group(graph, n));
- }
-
- grouper.reindex();
- mergedGroups = grouper.getAllGroups();
-
-
- Map<Node, String> batchGroupMap = new HashMap();
- List<Set<Node>> connectedComponents = new ConnectivityInspector<Node, IndexedEdge>(graph).connectedSets();
- for(int i=0; i<connectedComponents.size(); i++) {
- String groupId = "bg" + i;
- for(Node n: connectedComponents.get(i)) {
- batchGroupMap.put(n, groupId);
- }
- }
-
-// System.out.println("GRAPH:");
-// System.out.println(graph);
-
- Map<Group, Integer> parallelisms = getGroupParallelisms(graph, grouper, mergedGroups);
-
- TridentTopologyBuilder builder = new TridentTopologyBuilder();
-
- Map<Node, String> spoutIds = genSpoutIds(spoutNodes);
- Map<Group, String> boltIds = genBoltIds(mergedGroups);
-
- for(SpoutNode sn: spoutNodes) {
- Integer parallelism = parallelisms.get(grouper.nodeGroup(sn));
- if(sn.type == SpoutNode.SpoutType.DRPC) {
- builder.setBatchPerTupleSpout(spoutIds.get(sn), sn.streamId,
- (IRichSpout) sn.spout, parallelism, batchGroupMap.get(sn));
- } else {
- ITridentSpout s;
- if(sn.spout instanceof IBatchSpout) {
- s = new BatchSpoutExecutor((IBatchSpout)sn.spout);
- } else if(sn.spout instanceof ITridentSpout) {
- s = (ITridentSpout) sn.spout;
- } else {
- throw new RuntimeException("Regular rich spouts not supported yet... try wrapping in a RichSpoutBatchExecutor");
- // TODO: handle regular rich spout without batches (need lots of updates to support this throughout)
- }
- builder.setSpout(spoutIds.get(sn), sn.streamId, sn.txId, s, parallelism, batchGroupMap.get(sn));
- }
- }
-
- for(Group g: mergedGroups) {
- if(!isSpoutGroup(g)) {
- Integer p = parallelisms.get(g);
- Map<String, String> streamToGroup = getOutputStreamBatchGroups(g, batchGroupMap);
- BoltDeclarer d = builder.setBolt(boltIds.get(g), new SubtopologyBolt(graph, g.nodes, batchGroupMap), p,
- committerBatches(g, batchGroupMap), streamToGroup);
- Collection<PartitionNode> inputs = uniquedSubscriptions(externalGroupInputs(g));
- for(PartitionNode n: inputs) {
- Node parent = TridentUtils.getParent(graph, n);
- String componentId;
- if(parent instanceof SpoutNode) {
- componentId = spoutIds.get(parent);
- } else {
- componentId = boltIds.get(grouper.nodeGroup(parent));
- }
- d.grouping(new GlobalStreamId(componentId, n.streamId), n.thriftGrouping);
- }
- }
- }
-
- return builder.buildTopology();
- }
-
- private static void completeDRPC(DefaultDirectedGraph<Node, IndexedEdge> graph, Map<String, List<Node>> colocate, UniqueIdGen gen) {
- List<Set<Node>> connectedComponents = new ConnectivityInspector<Node, IndexedEdge>(graph).connectedSets();
-
- for(Set<Node> g: connectedComponents) {
- checkValidJoins(g);
- }
-
- TridentTopology helper = new TridentTopology(graph, colocate, gen);
- for(Set<Node> g: connectedComponents) {
- SpoutNode drpcNode = getDRPCSpoutNode(g);
- if(drpcNode!=null) {
- Stream lastStream = new Stream(helper, null, getLastAddedNode(g));
- Stream s = new Stream(helper, null, drpcNode);
- helper.multiReduce(
- s.project(new Fields("return-info"))
- .batchGlobal(),
- lastStream.batchGlobal(),
- new ReturnResultsReducer(),
- new Fields());
- }
- }
- }
-
- private static Node getLastAddedNode(Collection<Node> g) {
- Node ret = null;
- for(Node n: g) {
- if(ret==null || n.creationIndex > ret.creationIndex) {
- ret = n;
- }
- }
- return ret;
- }
-
- // returns null if it's not a DRPC group
- private static SpoutNode getDRPCSpoutNode(Collection<Node> g) {
- for(Node n: g) {
- if(n instanceof SpoutNode) {
- SpoutNode.SpoutType type = ((SpoutNode) n).type;
- if(type==SpoutNode.SpoutType.DRPC) {
- return (SpoutNode) n;
- }
- }
- }
- return null;
- }
-
- private static void checkValidJoins(Collection<Node> g) {
- boolean hasDRPCSpout = false;
- boolean hasBatchSpout = false;
- for(Node n: g) {
- if(n instanceof SpoutNode) {
- SpoutNode.SpoutType type = ((SpoutNode) n).type;
- if(type==SpoutNode.SpoutType.BATCH) {
- hasBatchSpout = true;
- } else if(type==SpoutNode.SpoutType.DRPC) {
- hasDRPCSpout = true;
- }
- }
- }
- if(hasBatchSpout && hasDRPCSpout) {
- throw new RuntimeException("Cannot join DRPC stream with streams originating from other spouts");
- }
- }
-
- private static boolean isSpoutGroup(Group g) {
- return g.nodes.size() == 1 && g.nodes.iterator().next() instanceof SpoutNode;
- }
-
- private static Collection<PartitionNode> uniquedSubscriptions(Set<PartitionNode> subscriptions) {
- Map<String, PartitionNode> ret = new HashMap();
- for(PartitionNode n: subscriptions) {
- PartitionNode curr = ret.get(n.streamId);
- if(curr!=null && !curr.thriftGrouping.equals(n.thriftGrouping)) {
- throw new RuntimeException("Multiple subscriptions to the same stream with different groupings. Should be impossible since that is explicitly guarded against.");
- }
- ret.put(n.streamId, n);
- }
- return ret.values();
- }
-
- private static Map<Node, String> genSpoutIds(Collection<SpoutNode> spoutNodes) {
- Map<Node, String> ret = new HashMap();
- int ctr = 0;
- for(SpoutNode n: spoutNodes) {
- ret.put(n, "spout" + ctr);
- ctr++;
- }
- return ret;
- }
-
- private static Map<Group, String> genBoltIds(Collection<Group> groups) {
- Map<Group, String> ret = new HashMap();
- int ctr = 0;
- for(Group g: groups) {
- if(!isSpoutGroup(g)) {
- List<String> name = new ArrayList();
- name.add("b");
- name.add("" + ctr);
- String groupName = getGroupName(g);
- if(groupName!=null && !groupName.isEmpty()) {
- name.add(getGroupName(g));
- }
- ret.put(g, Utils.join(name, "-"));
- ctr++;
- }
- }
- return ret;
- }
-
- private static String getGroupName(Group g) {
- TreeMap<Integer, String> sortedNames = new TreeMap();
- for(Node n: g.nodes) {
- if(n.name!=null) {
- sortedNames.put(n.creationIndex, n.name);
- }
- }
- List<String> names = new ArrayList<String>();
- String prevName = null;
- for(String n: sortedNames.values()) {
- if(prevName==null || !n.equals(prevName)) {
- prevName = n;
- names.add(n);
- }
- }
- return Utils.join(names, "-");
- }
-
- private static Map<String, String> getOutputStreamBatchGroups(Group g, Map<Node, String> batchGroupMap) {
- Map<String, String> ret = new HashMap();
- Set<PartitionNode> externalGroupOutputs = externalGroupOutputs(g);
- for(PartitionNode n: externalGroupOutputs) {
- ret.put(n.streamId, batchGroupMap.get(n));
- }
- return ret;
- }
-
- private static Set<String> committerBatches(Group g, Map<Node, String> batchGroupMap) {
- Set<String> ret = new HashSet();
- for(Node n: g.nodes) {
- if(n instanceof ProcessorNode) {
- if(((ProcessorNode) n).committer) {
- ret.add(batchGroupMap.get(n));
- }
- }
- }
- return ret;
- }
-
- private static Map<Group, Integer> getGroupParallelisms(DirectedGraph<Node, IndexedEdge> graph, GraphGrouper grouper, Collection<Group> groups) {
- UndirectedGraph<Group, Object> equivs = new Pseudograph<Group, Object>(Object.class);
- for(Group g: groups) {
- equivs.addVertex(g);
- }
- for(Group g: groups) {
- for(PartitionNode n: externalGroupInputs(g)) {
- if(isIdentityPartition(n)) {
- Node parent = TridentUtils.getParent(graph, n);
- Group parentGroup = grouper.nodeGroup(parent);
- if(parentGroup!=null && !parentGroup.equals(g)) {
- equivs.addEdge(parentGroup, g);
- }
- }
- }
- }
-
- Map<Group, Integer> ret = new HashMap();
- List<Set<Group>> equivGroups = new ConnectivityInspector<Group, Object>(equivs).connectedSets();
- for(Set<Group> equivGroup: equivGroups) {
- Integer fixedP = getFixedParallelism(equivGroup);
- Integer maxP = getMaxParallelism(equivGroup);
- if(fixedP!=null && maxP!=null && maxP < fixedP) {
- throw new RuntimeException("Parallelism is fixed to " + fixedP + " but max parallelism is less than that: " + maxP);
- }
-
-
- Integer p = 1;
- for(Group g: equivGroup) {
- for(Node n: g.nodes) {
- if(n.parallelismHint!=null) {
- p = Math.max(p, n.parallelismHint);
- }
- }
- }
- if(maxP!=null) p = Math.min(maxP, p);
-
- if(fixedP!=null) p = fixedP;
- for(Group g: equivGroup) {
- ret.put(g, p);
- }
- }
- return ret;
- }
-
- private static Integer getMaxParallelism(Set<Group> groups) {
- Integer ret = null;
- for(Group g: groups) {
- if(isSpoutGroup(g)) {
- SpoutNode n = (SpoutNode) g.nodes.iterator().next();
- Map conf = getSpoutComponentConfig(n.spout);
- if(conf==null) conf = new HashMap();
- Number maxP = (Number) conf.get(Config.TOPOLOGY_MAX_TASK_PARALLELISM);
- if(maxP!=null) {
- if(ret==null) ret = maxP.intValue();
- else ret = Math.min(ret, maxP.intValue());
- }
- }
- }
- return ret;
- }
-
- private static Map getSpoutComponentConfig(Object spout) {
- if(spout instanceof IRichSpout) {
- return ((IRichSpout) spout).getComponentConfiguration();
- } else if (spout instanceof IBatchSpout) {
- return ((IBatchSpout) spout).getComponentConfiguration();
- } else {
- return ((ITridentSpout) spout).getComponentConfiguration();
- }
- }
-
- private static Integer getFixedParallelism(Set<Group> groups) {
- Integer ret = null;
- for(Group g: groups) {
- for(Node n: g.nodes) {
- if(n.stateInfo != null && n.stateInfo.spec.requiredNumPartitions!=null) {
- int reqPartitions = n.stateInfo.spec.requiredNumPartitions;
- if(ret!=null && ret!=reqPartitions) {
- throw new RuntimeException("Cannot have one group have fixed parallelism of two different values");
- }
- ret = reqPartitions;
- }
- }
- }
- return ret;
- }
-
- private static boolean isIdentityPartition(PartitionNode n) {
- Grouping g = n.thriftGrouping;
- if(g.is_set_custom_serialized()) {
- CustomStreamGrouping csg = (CustomStreamGrouping) Utils.deserialize(g.get_custom_serialized());
- return csg instanceof IdentityGrouping;
- }
- return false;
- }
-
- private static void addEdge(DirectedGraph g, Object source, Object target, int index) {
- g.addEdge(source, target, new IndexedEdge(source, target, index));
- }
-
- private Node makeIdentityNode(Fields allOutputFields) {
- return new ProcessorNode(getUniqueStreamId(), null, allOutputFields, new Fields(),
- new EachProcessor(new Fields(), new FilterExecutor(new TrueFilter())));
- }
-
- private static List<PartitionNode> extraPartitionInputs(Group g) {
- List<PartitionNode> ret = new ArrayList();
- Set<PartitionNode> inputs = externalGroupInputs(g);
- Map<String, List<PartitionNode>> grouped = new HashMap();
- for(PartitionNode n: inputs) {
- if(!grouped.containsKey(n.streamId)) {
- grouped.put(n.streamId, new ArrayList());
- }
- grouped.get(n.streamId).add(n);
- }
- for(List<PartitionNode> group: grouped.values()) {
- PartitionNode anchor = group.get(0);
- for(int i=1; i<group.size(); i++) {
- PartitionNode n = group.get(i);
- if(!n.thriftGrouping.equals(anchor.thriftGrouping)) {
- ret.add(n);
- }
- }
- }
- return ret;
- }
-
- private static Set<PartitionNode> externalGroupInputs(Group g) {
- Set<PartitionNode> ret = new HashSet();
- for(Node n: g.incomingNodes()) {
- if(n instanceof PartitionNode) {
- ret.add((PartitionNode) n);
- }
- }
- return ret;
- }
-
- private static Set<PartitionNode> externalGroupOutputs(Group g) {
- Set<PartitionNode> ret = new HashSet();
- for(Node n: g.outgoingNodes()) {
- if(n instanceof PartitionNode) {
- ret.add((PartitionNode) n);
- }
- }
- return ret;
- }
-
- private static PartitionNode makeIdentityPartition(Node basis) {
- return new PartitionNode(basis.streamId, basis.name, basis.allOutputFields,
- Grouping.custom_serialized(Utils.serialize(new IdentityGrouping())));
- }
-
-
- protected String getUniqueStreamId() {
- return _gen.getUniqueStreamId();
- }
-
- protected String getUniqueStateId() {
- return _gen.getUniqueStateId();
- }
-
- protected void registerNode(Node n) {
- _graph.addVertex(n);
- if(n.stateInfo!=null) {
- String id = n.stateInfo.id;
- if(!_colocate.containsKey(id)) {
- _colocate.put(id, new ArrayList());
- }
- _colocate.get(id).add(n);
- }
- }
-
- protected Stream addNode(Node n) {
- registerNode(n);
- return new Stream(this, n.name, n);
- }
-
- protected void registerSourcedNode(List<Stream> sources, Node newNode) {
- registerNode(newNode);
- int streamIndex = 0;
- for(Stream s: sources) {
- _graph.addEdge(s._node, newNode, new IndexedEdge(s._node, newNode, streamIndex));
- streamIndex++;
- }
- }
-
- protected Stream addSourcedNode(List<Stream> sources, Node newNode) {
- registerSourcedNode(sources, newNode);
- return new Stream(this, newNode.name, newNode);
- }
-
- protected TridentState addSourcedStateNode(List<Stream> sources, Node newNode) {
- registerSourcedNode(sources, newNode);
- return new TridentState(this, newNode);
- }
-
- protected Stream addSourcedNode(Stream source, Node newNode) {
- return addSourcedNode(Arrays.asList(source), newNode);
- }
-
- protected TridentState addSourcedStateNode(Stream source, Node newNode) {
- return addSourcedStateNode(Arrays.asList(source), newNode);
- }
-
- private static List<Fields> getAllOutputFields(List streams) {
- List<Fields> ret = new ArrayList<Fields>();
- for(Object o: streams) {
- ret.add(((IAggregatableStream) o).getOutputFields());
- }
- return ret;
- }
-
-
- private static List<GroupedStream> groupedStreams(List<Stream> streams, List<Fields> joinFields) {
- List<GroupedStream> ret = new ArrayList<GroupedStream>();
- for(int i=0; i<streams.size(); i++) {
- ret.add(streams.get(i).groupBy(joinFields.get(i)));
- }
- return ret;
- }
-
- private static List<Fields> strippedInputFields(List<Stream> streams, List<Fields> joinFields) {
- List<Fields> ret = new ArrayList<Fields>();
- for(int i=0; i<streams.size(); i++) {
- ret.add(TridentUtils.fieldsSubtract(streams.get(i).getOutputFields(), joinFields.get(i)));
- }
- return ret;
- }
-
- private static List<JoinType> repeat(int n, JoinType type) {
- List<JoinType> ret = new ArrayList<JoinType>();
- for(int i=0; i<n; i++) {
- ret.add(type);
- }
- return ret;
- }
-}
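For orientation, here is a minimal sketch of how the planner above is typically driven: a batch spout feeds newStream(), each fluent operation adds processor/partition nodes to the graph, and build() runs the grouping and parallelism logic shown in this file. FixedBatchSpout, MemoryMapState and Count are standard Trident test/built-in classes; the Split function and all names below are illustrative and not part of the code being removed.

import backtype.storm.Config;
import backtype.storm.LocalCluster;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Values;
import storm.trident.TridentState;
import storm.trident.TridentTopology;
import storm.trident.operation.BaseFunction;
import storm.trident.operation.TridentCollector;
import storm.trident.operation.builtin.Count;
import storm.trident.testing.FixedBatchSpout;
import storm.trident.testing.MemoryMapState;
import storm.trident.tuple.TridentTuple;

public class TridentTopologySketch {
    // hypothetical tokenizer, defined only for this sketch
    public static class Split extends BaseFunction {
        @Override
        public void execute(TridentTuple tuple, TridentCollector collector) {
            for (String word : tuple.getString(0).split(" ")) {
                collector.emit(new Values(word));
            }
        }
    }

    public static void main(String[] args) throws Exception {
        FixedBatchSpout spout = new FixedBatchSpout(new Fields("sentence"), 3,
                new Values("the cow jumped over the moon"),
                new Values("four score and seven years ago"));
        spout.setCycle(true);

        TridentTopology topology = new TridentTopology();            // empty planner graph
        TridentState wordCounts = topology
                .newStream("spout1", spout)                           // adds a SpoutNode
                .each(new Fields("sentence"), new Split(), new Fields("word"))
                .groupBy(new Fields("word"))                          // GroupedStream
                .persistentAggregate(new MemoryMapState.Factory(),    // state node, colocated via _colocate
                        new Count(), new Fields("count"));

        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology("wordCounter", new Config(),
                topology.build());                                    // runs the planner in this file
    }
}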
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/drpc/ReturnResultsReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/drpc/ReturnResultsReducer.java b/jstorm-client/src/main/java/storm/trident/drpc/ReturnResultsReducer.java
deleted file mode 100644
index e89719e..0000000
--- a/jstorm-client/src/main/java/storm/trident/drpc/ReturnResultsReducer.java
+++ /dev/null
@@ -1,96 +0,0 @@
-package storm.trident.drpc;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang.builder.ToStringBuilder;
-import org.apache.thrift7.TException;
-
-import storm.trident.drpc.ReturnResultsReducer.ReturnResultsState;
-import storm.trident.operation.MultiReducer;
-import storm.trident.operation.TridentCollector;
-import storm.trident.operation.TridentMultiReducerContext;
-import storm.trident.tuple.TridentTuple;
-import backtype.storm.Config;
-import backtype.storm.drpc.DRPCInvocationsClient;
-import backtype.storm.generated.DistributedRPCInvocations;
-import backtype.storm.utils.ServiceRegistry;
-import backtype.storm.utils.Utils;
-
-
-public class ReturnResultsReducer implements MultiReducer<ReturnResultsState> {
- public static class ReturnResultsState {
- List<TridentTuple> results = new ArrayList<TridentTuple>();
- String returnInfo;
-
- @Override
- public String toString() {
- return ToStringBuilder.reflectionToString(this);
- }
- }
- boolean local;
-
- Map<List, DRPCInvocationsClient> _clients = new HashMap<List, DRPCInvocationsClient>();
-
-
- @Override
- public void prepare(Map conf, TridentMultiReducerContext context) {
- local = conf.get(Config.STORM_CLUSTER_MODE).equals("local");
- }
-
- @Override
- public ReturnResultsState init(TridentCollector collector) {
- return new ReturnResultsState();
- }
-
- @Override
- public void execute(ReturnResultsState state, int streamIndex, TridentTuple input, TridentCollector collector) {
- if(streamIndex==0) {
- state.returnInfo = input.getString(0);
- } else {
- state.results.add(input);
- }
- }
-
- @Override
- public void complete(ReturnResultsState state, TridentCollector collector) {
- // only one of the multireducers will receive the tuples
- if(state.returnInfo!=null) {
- String result = Utils.to_json(state.results);
- Map retMap = (Map) Utils.from_json(state.returnInfo);
- final String host = (String) retMap.get("host");
- final int port = Utils.getInt(retMap.get("port"));
- String id = (String) retMap.get("id");
- DistributedRPCInvocations.Iface client;
- if(local) {
- client = (DistributedRPCInvocations.Iface) ServiceRegistry.getService(host);
- } else {
- List server = new ArrayList() {{
- add(host);
- add(port);
- }};
-
- if(!_clients.containsKey(server)) {
- _clients.put(server, new DRPCInvocationsClient(host, port));
- }
- client = _clients.get(server);
- }
-
- try {
- client.result(id, result);
- } catch(TException e) {
- collector.reportError(e);
- }
- }
- }
-
- @Override
- public void cleanup() {
- for(DRPCInvocationsClient c: _clients.values()) {
- c.close();
- }
- }
-
-}
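A hedged sketch of the DRPC round trip this reducer completes: completeDRPC() in TridentTopology joins the spout's "return-info" field with the last stream of the request subgraph, and ReturnResultsReducer ships the collected tuples back to the DRPC server. LocalDRPC and MapGet are standard Storm/Trident classes; topology, Split and wordCounts refer to the illustrative word-count sketch earlier in this section.

// assumes: import backtype.storm.LocalDRPC; import backtype.storm.tuple.Fields;
// import storm.trident.operation.builtin.MapGet;
// (fragment; runs inside a method that declares throws Exception)
LocalDRPC drpc = new LocalDRPC();
topology.newDRPCStream("count-word", drpc)
        .each(new Fields("args"), new Split(), new Fields("word"))
        .groupBy(new Fields("word"))
        .stateQuery(wordCounts, new Fields("word"), new MapGet(), new Fields("count"));
// after submitting the topology, a request flows spout -> subgraph -> ReturnResultsReducer -> caller:
String result = drpc.execute("count-word", "cow moon");
System.out.println(result);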
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/ChainedAggregatorDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/ChainedAggregatorDeclarer.java b/jstorm-client/src/main/java/storm/trident/fluent/ChainedAggregatorDeclarer.java
deleted file mode 100644
index de8fe9c..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/ChainedAggregatorDeclarer.java
+++ /dev/null
@@ -1,166 +0,0 @@
-package storm.trident.fluent;
-
-import backtype.storm.tuple.Fields;
-import java.util.ArrayList;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import storm.trident.Stream;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.operation.impl.ChainedAggregatorImpl;
-import storm.trident.operation.impl.CombinerAggregatorCombineImpl;
-import storm.trident.operation.impl.CombinerAggregatorInitImpl;
-import storm.trident.operation.impl.ReducerAggregatorImpl;
-import storm.trident.operation.impl.SingleEmitAggregator;
-import storm.trident.operation.impl.SingleEmitAggregator.BatchToPartition;
-import storm.trident.tuple.ComboList;
-
-
-public class ChainedAggregatorDeclarer implements ChainedFullAggregatorDeclarer, ChainedPartitionAggregatorDeclarer {
- public static interface AggregationPartition {
- Stream partition(Stream input);
- }
-
- private static enum AggType {
- PARTITION,
- FULL,
- FULL_COMBINE
- }
-
- // inputFields can be equal to outFields, but multiple aggregators cannot have overlapping outFields
- private static class AggSpec {
- Fields inFields;
- Aggregator agg;
- Fields outFields;
-
- public AggSpec(Fields inFields, Aggregator agg, Fields outFields) {
- this.inFields = inFields;
- this.agg = agg;
- this.outFields = outFields;
- }
- }
-
- List<AggSpec> _aggs = new ArrayList<AggSpec>();
- IAggregatableStream _stream;
- AggType _type = null;
- GlobalAggregationScheme _globalScheme;
-
- public ChainedAggregatorDeclarer(IAggregatableStream stream, GlobalAggregationScheme globalScheme) {
- _stream = stream;
- _globalScheme = globalScheme;
- }
-
- public Stream chainEnd() {
- Fields[] inputFields = new Fields[_aggs.size()];
- Aggregator[] aggs = new Aggregator[_aggs.size()];
- int[] outSizes = new int[_aggs.size()];
- List<String> allOutFields = new ArrayList<String>();
- Set<String> allInFields = new HashSet<String>();
- for(int i=0; i<_aggs.size(); i++) {
- AggSpec spec = _aggs.get(i);
- Fields infields = spec.inFields;
- if(infields==null) infields = new Fields();
- Fields outfields = spec.outFields;
- if(outfields==null) outfields = new Fields();
-
- inputFields[i] = infields;
- aggs[i] = spec.agg;
- outSizes[i] = outfields.size();
- allOutFields.addAll(outfields.toList());
- allInFields.addAll(infields.toList());
- }
- if(new HashSet(allOutFields).size() != allOutFields.size()) {
- throw new IllegalArgumentException("Output fields for chained aggregators must be distinct: " + allOutFields.toString());
- }
-
- Fields inFields = new Fields(new ArrayList<String>(allInFields));
- Fields outFields = new Fields(allOutFields);
- Aggregator combined = new ChainedAggregatorImpl(aggs, inputFields, new ComboList.Factory(outSizes));
-
- if(_type!=AggType.FULL) {
- _stream = _stream.partitionAggregate(inFields, combined, outFields);
- }
- if(_type!=AggType.PARTITION) {
- _stream = _globalScheme.aggPartition(_stream);
- BatchToPartition singleEmit = _globalScheme.singleEmitPartitioner();
- Aggregator toAgg = combined;
- if(singleEmit!=null) {
- toAgg = new SingleEmitAggregator(combined, singleEmit);
- }
- // this assumes that inFields and outFields are the same for a CombinerAggregator
- // (the same assumption is made above)
- _stream = _stream.partitionAggregate(inFields, toAgg, outFields);
- }
- return _stream.toStream();
- }
-
- public ChainedPartitionAggregatorDeclarer partitionAggregate(Aggregator agg, Fields functionFields) {
- return partitionAggregate(null, agg, functionFields);
- }
-
- public ChainedPartitionAggregatorDeclarer partitionAggregate(Fields inputFields, Aggregator agg, Fields functionFields) {
- _type = AggType.PARTITION;
- _aggs.add(new AggSpec(inputFields, agg, functionFields));
- return this;
- }
-
- public ChainedPartitionAggregatorDeclarer partitionAggregate(CombinerAggregator agg, Fields functionFields) {
- return partitionAggregate(null, agg, functionFields);
- }
-
- public ChainedPartitionAggregatorDeclarer partitionAggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- initCombiner(inputFields, agg, functionFields);
- return partitionAggregate(functionFields, new CombinerAggregatorCombineImpl(agg), functionFields);
- }
-
- public ChainedPartitionAggregatorDeclarer partitionAggregate(ReducerAggregator agg, Fields functionFields) {
- return partitionAggregate(null, agg, functionFields);
- }
-
- public ChainedPartitionAggregatorDeclarer partitionAggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- return partitionAggregate(inputFields, new ReducerAggregatorImpl(agg), functionFields);
- }
-
- public ChainedFullAggregatorDeclarer aggregate(Aggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public ChainedFullAggregatorDeclarer aggregate(Fields inputFields, Aggregator agg, Fields functionFields) {
- return aggregate(inputFields, agg, functionFields, false);
- }
-
- private ChainedFullAggregatorDeclarer aggregate(Fields inputFields, Aggregator agg, Fields functionFields, boolean isCombiner) {
- if(isCombiner) {
- if(_type == null) {
- _type = AggType.FULL_COMBINE;
- }
- } else {
- _type = AggType.FULL;
- }
- _aggs.add(new AggSpec(inputFields, agg, functionFields));
- return this;
- }
-
- public ChainedFullAggregatorDeclarer aggregate(CombinerAggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public ChainedFullAggregatorDeclarer aggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- initCombiner(inputFields, agg, functionFields);
- return aggregate(functionFields, new CombinerAggregatorCombineImpl(agg), functionFields, true);
- }
-
- public ChainedFullAggregatorDeclarer aggregate(ReducerAggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public ChainedFullAggregatorDeclarer aggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- return aggregate(inputFields, new ReducerAggregatorImpl(agg), functionFields);
- }
-
- private void initCombiner(Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- _stream = _stream.each(inputFields, new CombinerAggregatorInitImpl(agg), functionFields);
- }
-}
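A hedged sketch of the fluent chain this declarer builds: Stream.chainedAgg() returns a ChainedAggregatorDeclarer, each aggregate() call appends an AggSpec, and chainEnd() wires the combined ChainedAggregatorImpl back into the stream in a single pass. Count and Sum are the standard Trident built-in CombinerAggregators; stream and the field names are illustrative.

// assumes: import backtype.storm.tuple.Fields; import storm.trident.Stream;
// import storm.trident.operation.builtin.Count; import storm.trident.operation.builtin.Sum;
Stream aggregated = stream.chainedAgg()
        .aggregate(new Count(), new Fields("count"))               // AggSpec #1: counts tuples, no input fields
        .aggregate(new Fields("b"), new Sum(), new Fields("sum"))  // AggSpec #2: sums field "b"
        .chainEnd();                                               // emits distinct output fields ["count", "sum"]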
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/ChainedFullAggregatorDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/ChainedFullAggregatorDeclarer.java b/jstorm-client/src/main/java/storm/trident/fluent/ChainedFullAggregatorDeclarer.java
deleted file mode 100644
index 84436a6..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/ChainedFullAggregatorDeclarer.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package storm.trident.fluent;
-
-import backtype.storm.tuple.Fields;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.ReducerAggregator;
-
-public interface ChainedFullAggregatorDeclarer extends IChainedAggregatorDeclarer {
- ChainedFullAggregatorDeclarer aggregate(Aggregator agg, Fields functionFields);
- ChainedFullAggregatorDeclarer aggregate(Fields inputFields, Aggregator agg, Fields functionFields);
- ChainedFullAggregatorDeclarer aggregate(CombinerAggregator agg, Fields functionFields);
- ChainedFullAggregatorDeclarer aggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields);
- ChainedFullAggregatorDeclarer aggregate(ReducerAggregator agg, Fields functionFields);
- ChainedFullAggregatorDeclarer aggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/ChainedPartitionAggregatorDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/ChainedPartitionAggregatorDeclarer.java b/jstorm-client/src/main/java/storm/trident/fluent/ChainedPartitionAggregatorDeclarer.java
deleted file mode 100644
index 00e2c5a..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/ChainedPartitionAggregatorDeclarer.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package storm.trident.fluent;
-
-import backtype.storm.tuple.Fields;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.ReducerAggregator;
-
-public interface ChainedPartitionAggregatorDeclarer extends IChainedAggregatorDeclarer {
- ChainedPartitionAggregatorDeclarer partitionAggregate(Aggregator agg, Fields functionFields);
- ChainedPartitionAggregatorDeclarer partitionAggregate(Fields inputFields, Aggregator agg, Fields functionFields);
- ChainedPartitionAggregatorDeclarer partitionAggregate(CombinerAggregator agg, Fields functionFields);
- ChainedPartitionAggregatorDeclarer partitionAggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields);
- ChainedPartitionAggregatorDeclarer partitionAggregate(ReducerAggregator agg, Fields functionFields);
- ChainedPartitionAggregatorDeclarer partitionAggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/GlobalAggregationScheme.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/GlobalAggregationScheme.java b/jstorm-client/src/main/java/storm/trident/fluent/GlobalAggregationScheme.java
deleted file mode 100644
index 96f15e9..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/GlobalAggregationScheme.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.fluent;
-
-import storm.trident.operation.impl.SingleEmitAggregator.BatchToPartition;
-
-
-public interface GlobalAggregationScheme<S extends IAggregatableStream> {
- IAggregatableStream aggPartition(S stream); // how to partition for second stage of aggregation
- BatchToPartition singleEmitPartitioner(); // return null if it's not single emit
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/GroupedStream.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/GroupedStream.java b/jstorm-client/src/main/java/storm/trident/fluent/GroupedStream.java
deleted file mode 100644
index ad1e121..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/GroupedStream.java
+++ /dev/null
@@ -1,157 +0,0 @@
-package storm.trident.fluent;
-
-import backtype.storm.tuple.Fields;
-import storm.trident.Stream;
-import storm.trident.TridentState;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.operation.Function;
-import storm.trident.operation.ReducerAggregator;
-import storm.trident.operation.impl.GroupedAggregator;
-import storm.trident.operation.impl.SingleEmitAggregator.BatchToPartition;
-import storm.trident.state.QueryFunction;
-import storm.trident.state.StateFactory;
-import storm.trident.state.StateSpec;
-import storm.trident.state.map.MapCombinerAggStateUpdater;
-import storm.trident.state.map.MapReducerAggStateUpdater;
-import storm.trident.util.TridentUtils;
-
-
-public class GroupedStream implements IAggregatableStream, GlobalAggregationScheme<GroupedStream> {
- Fields _groupFields;
- Stream _stream;
-
- public GroupedStream(Stream stream, Fields groupFields) {
- _groupFields = groupFields;
- _stream = stream;
- }
-
- public GroupedStream name(String name) {
- return new GroupedStream(_stream.name(name), _groupFields);
- }
-
- public ChainedAggregatorDeclarer chainedAgg() {
- return new ChainedAggregatorDeclarer(this, this);
- }
-
- public Stream aggregate(Aggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public Stream aggregate(Fields inputFields, Aggregator agg, Fields functionFields) {
- return new ChainedAggregatorDeclarer(this, this)
- .aggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public Stream aggregate(CombinerAggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public Stream aggregate(Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- return new ChainedAggregatorDeclarer(this, this)
- .aggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public Stream aggregate(ReducerAggregator agg, Fields functionFields) {
- return aggregate(null, agg, functionFields);
- }
-
- public Stream aggregate(Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- return new ChainedAggregatorDeclarer(this, this)
- .aggregate(inputFields, agg, functionFields)
- .chainEnd();
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, CombinerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, CombinerAggregator agg, Fields functionFields) {
- return persistentAggregate(spec, null, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), inputFields, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, Fields inputFields, CombinerAggregator agg, Fields functionFields) {
- return aggregate(inputFields, agg, functionFields)
- .partitionPersist(spec,
- TridentUtils.fieldsUnion(_groupFields, functionFields),
- new MapCombinerAggStateUpdater(agg, _groupFields, functionFields),
- TridentUtils.fieldsConcat(_groupFields, functionFields));
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), inputFields, agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, Fields inputFields, ReducerAggregator agg, Fields functionFields) {
- return _stream.partitionBy(_groupFields)
- .partitionPersist(spec,
- TridentUtils.fieldsUnion(_groupFields, inputFields),
- new MapReducerAggStateUpdater(agg, _groupFields, inputFields),
- TridentUtils.fieldsConcat(_groupFields, functionFields));
- }
-
- public Stream stateQuery(TridentState state, Fields inputFields, QueryFunction function, Fields functionFields) {
- return _stream.partitionBy(_groupFields)
- .stateQuery(state,
- inputFields,
- function,
- functionFields);
- }
-
- public TridentState persistentAggregate(StateFactory stateFactory, ReducerAggregator agg, Fields functionFields) {
- return persistentAggregate(new StateSpec(stateFactory), agg, functionFields);
- }
-
- public TridentState persistentAggregate(StateSpec spec, ReducerAggregator agg, Fields functionFields) {
- return persistentAggregate(spec, null, agg, functionFields);
- }
-
- public Stream stateQuery(TridentState state, QueryFunction function, Fields functionFields) {
- return stateQuery(state, null, function, functionFields);
- }
-
- @Override
- public IAggregatableStream each(Fields inputFields, Function function, Fields functionFields) {
- Stream s = _stream.each(inputFields, function, functionFields);
- return new GroupedStream(s, _groupFields);
- }
-
- @Override
- public IAggregatableStream partitionAggregate(Fields inputFields, Aggregator agg, Fields functionFields) {
- Aggregator groupedAgg = new GroupedAggregator(agg, _groupFields, inputFields, functionFields.size());
- Fields allInFields = TridentUtils.fieldsUnion(_groupFields, inputFields);
- Fields allOutFields = TridentUtils.fieldsConcat(_groupFields, functionFields);
- Stream s = _stream.partitionAggregate(allInFields, groupedAgg, allOutFields);
- return new GroupedStream(s, _groupFields);
- }
-
- @Override
- public IAggregatableStream aggPartition(GroupedStream s) {
- return new GroupedStream(s._stream.partitionBy(_groupFields), _groupFields);
- }
-
- @Override
- public Stream toStream() {
- return _stream;
- }
-
- @Override
- public Fields getOutputFields() {
- return _stream.getOutputFields();
- }
-
- public Fields getGroupFields() {
- return _groupFields;
- }
-
- @Override
- public BatchToPartition singleEmitPartitioner() {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/IAggregatableStream.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/IAggregatableStream.java b/jstorm-client/src/main/java/storm/trident/fluent/IAggregatableStream.java
deleted file mode 100644
index e10852e..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/IAggregatableStream.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.fluent;
-
-import backtype.storm.tuple.Fields;
-import storm.trident.Stream;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.Function;
-import storm.trident.operation.impl.SingleEmitAggregator.BatchToPartition;
-
-public interface IAggregatableStream {
- IAggregatableStream each(Fields inputFields, Function function, Fields functionFields);
- IAggregatableStream partitionAggregate(Fields inputFields, Aggregator agg, Fields functionFields);
- Stream toStream();
- Fields getOutputFields();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/IChainedAggregatorDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/IChainedAggregatorDeclarer.java b/jstorm-client/src/main/java/storm/trident/fluent/IChainedAggregatorDeclarer.java
deleted file mode 100644
index a42dfbe..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/IChainedAggregatorDeclarer.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package storm.trident.fluent;
-
-import storm.trident.Stream;
-
-public interface IChainedAggregatorDeclarer {
- Stream chainEnd();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/fluent/UniqueIdGen.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/fluent/UniqueIdGen.java b/jstorm-client/src/main/java/storm/trident/fluent/UniqueIdGen.java
deleted file mode 100644
index 64ad621..0000000
--- a/jstorm-client/src/main/java/storm/trident/fluent/UniqueIdGen.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package storm.trident.fluent;
-
-public class UniqueIdGen {
- int _streamCounter = 0;
-
- public String getUniqueStreamId() {
- _streamCounter++;
- return "s" + _streamCounter;
- }
-
- int _stateCounter = 0;
-
- public String getUniqueStateId() {
- _stateCounter++;
- return "state" + _stateCounter;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/graph/GraphGrouper.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/graph/GraphGrouper.java b/jstorm-client/src/main/java/storm/trident/graph/GraphGrouper.java
deleted file mode 100644
index b107269..0000000
--- a/jstorm-client/src/main/java/storm/trident/graph/GraphGrouper.java
+++ /dev/null
@@ -1,106 +0,0 @@
-package storm.trident.graph;
-
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-import org.jgrapht.DirectedGraph;
-import storm.trident.planner.Node;
-import storm.trident.util.IndexedEdge;
-
-
-public class GraphGrouper {
-
- DirectedGraph<Node, IndexedEdge> graph;
- Set<Group> currGroups;
- Map<Node, Group> groupIndex = new HashMap();
-
- public GraphGrouper(DirectedGraph<Node, IndexedEdge> graph, Collection<Group> initialGroups) {
- this.graph = graph;
- this.currGroups = new HashSet(initialGroups);
- reindex();
- }
-
- public Collection<Group> getAllGroups() {
- return currGroups;
- }
-
- public void addGroup(Group g) {
- currGroups.add(g);
- }
-
- public void reindex() {
- groupIndex.clear();
- for(Group g: currGroups) {
- for(Node n: g.nodes) {
- groupIndex.put(n, g);
- }
- }
- }
-
- public void mergeFully() {
- boolean somethingHappened = true;
- while(somethingHappened) {
- somethingHappened = false;
- for(Group g: currGroups) {
- Collection<Group> outgoingGroups = outgoingGroups(g);
- if(outgoingGroups.size()==1) {
- Group out = outgoingGroups.iterator().next();
- if(out!=null) {
- merge(g, out);
- somethingHappened = true;
- break;
- }
- }
-
- Collection<Group> incomingGroups = incomingGroups(g);
- if(incomingGroups.size()==1) {
- Group in = incomingGroups.iterator().next();
- if(in!=null) {
- merge(g, in);
- somethingHappened = true;
- break;
- }
- }
- }
- }
- }
-
- private void merge(Group g1, Group g2) {
- Group newGroup = new Group(g1, g2);
- currGroups.remove(g1);
- currGroups.remove(g2);
- currGroups.add(newGroup);
- for(Node n: newGroup.nodes) {
- groupIndex.put(n, newGroup);
- }
- }
-
- public Collection<Group> outgoingGroups(Group g) {
- Set<Group> ret = new HashSet();
- for(Node n: g.outgoingNodes()) {
- Group other = nodeGroup(n);
- if(other==null || !other.equals(g)) {
- ret.add(other);
- }
- }
- return ret;
- }
-
- public Collection<Group> incomingGroups(Group g) {
- Set<Group> ret = new HashSet();
- for(Node n: g.incomingNodes()) {
- Group other = nodeGroup(n);
- if(other==null || !other.equals(g)) {
- ret.add(other);
- }
- }
- return ret;
- }
-
- public Group nodeGroup(Node n) {
- return groupIndex.get(n);
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/graph/Group.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/graph/Group.java b/jstorm-client/src/main/java/storm/trident/graph/Group.java
deleted file mode 100644
index c329ad6..0000000
--- a/jstorm-client/src/main/java/storm/trident/graph/Group.java
+++ /dev/null
@@ -1,89 +0,0 @@
-package storm.trident.graph;
-
-import java.util.Arrays;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import java.util.UUID;
-
-import org.jgrapht.DirectedGraph;
-
-import storm.trident.planner.Node;
-import storm.trident.util.IndexedEdge;
-import storm.trident.util.TridentUtils;
-
-
-public class Group {
- public Set<Node> nodes = new HashSet<Node>();
- private DirectedGraph<Node, IndexedEdge> graph;
- private String id;
-
- public Group(DirectedGraph graph, List<Node> nodes) {
- init(graph);
- this.nodes.addAll(nodes);
- this.graph = graph;
- }
-
- public Group(DirectedGraph graph, Node n) {
- this(graph, Arrays.asList(n));
- }
-
- public Group(Group g1, Group g2) {
- init(g1.graph);
- nodes.addAll(g1.nodes);
- nodes.addAll(g2.nodes);
- }
-
- private void init(DirectedGraph graph) {
- this.graph = graph;
- this.id = UUID.randomUUID().toString();
- }
-
- public Set<Node> outgoingNodes() {
- Set<Node> ret = new HashSet<Node>();
- for(Node n: nodes) {
- ret.addAll(TridentUtils.getChildren(graph, n));
- }
- return ret;
- }
-
- public Set<Node> incomingNodes() {
- Set<Node> ret = new HashSet<Node>();
- for(Node n: nodes) {
- ret.addAll(TridentUtils.getParents(graph, n));
- }
- return ret;
- }
-
- @Override
- public int hashCode() {
- final int prime = 31;
- int result = 1;
- result = prime * result + ((id == null) ? 0 : id.hashCode());
- return result;
- }
-
- @Override
- public boolean equals(Object obj) {
- if (this == obj)
- return true;
- if (obj == null)
- return false;
- if (getClass() != obj.getClass())
- return false;
- Group other = (Group) obj;
- if (id == null) {
- if (other.id != null)
- return false;
- } else if (!id.equals(other.id))
- return false;
- return true;
- }
-
-
-
- @Override
- public String toString() {
- return nodes.toString();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/Aggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/Aggregator.java b/jstorm-client/src/main/java/storm/trident/operation/Aggregator.java
deleted file mode 100644
index 5181703..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/Aggregator.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.operation;
-
-import storm.trident.tuple.TridentTuple;
-
-public interface Aggregator<T> extends Operation {
- T init(Object batchId, TridentCollector collector);
- void aggregate(T val, TridentTuple tuple, TridentCollector collector);
- void complete(T val, TridentCollector collector);
-}
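A hedged sketch of the three-callback lifecycle this interface defines: init() is called once per batch, aggregate() once per tuple, and complete() when the batch finishes. The class extends BaseAggregator (removed later in this same diff) so the Operation callbacks are inherited; the class and state names are illustrative.

import backtype.storm.tuple.Values;
import storm.trident.operation.BaseAggregator;
import storm.trident.operation.TridentCollector;
import storm.trident.tuple.TridentTuple;

public class BatchCountSketch extends BaseAggregator<BatchCountSketch.State> {
    public static class State {
        long count = 0;
    }

    @Override
    public State init(Object batchId, TridentCollector collector) {
        return new State();                         // fresh state at the start of each batch
    }

    @Override
    public void aggregate(State state, TridentTuple tuple, TridentCollector collector) {
        state.count++;                              // one increment per tuple in the batch
    }

    @Override
    public void complete(State state, TridentCollector collector) {
        collector.emit(new Values(state.count));    // emit once when the batch is finished
    }
}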
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/Assembly.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/Assembly.java b/jstorm-client/src/main/java/storm/trident/operation/Assembly.java
deleted file mode 100644
index 17aaca2..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/Assembly.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.operation;
-
-import storm.trident.Stream;
-
-
-public interface Assembly {
- Stream apply(Stream input);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/BaseAggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/BaseAggregator.java b/jstorm-client/src/main/java/storm/trident/operation/BaseAggregator.java
deleted file mode 100644
index c97b84f..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/BaseAggregator.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package storm.trident.operation;
-
-
-public abstract class BaseAggregator<T> extends BaseOperation implements Aggregator<T> {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/BaseFilter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/BaseFilter.java b/jstorm-client/src/main/java/storm/trident/operation/BaseFilter.java
deleted file mode 100644
index d629d0d..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/BaseFilter.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package storm.trident.operation;
-
-
-public abstract class BaseFilter extends BaseOperation implements Filter {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/BaseFunction.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/BaseFunction.java b/jstorm-client/src/main/java/storm/trident/operation/BaseFunction.java
deleted file mode 100644
index 8ff6b05..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/BaseFunction.java
+++ /dev/null
@@ -1,6 +0,0 @@
-package storm.trident.operation;
-
-
-public abstract class BaseFunction extends BaseOperation implements Function {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/BaseMultiReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/BaseMultiReducer.java b/jstorm-client/src/main/java/storm/trident/operation/BaseMultiReducer.java
deleted file mode 100644
index 328205d..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/BaseMultiReducer.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package storm.trident.operation;
-
-import java.util.Map;
-
-public abstract class BaseMultiReducer<T> implements MultiReducer<T> {
-
- @Override
- public void prepare(Map conf, TridentMultiReducerContext context) {
- }
-
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/BaseOperation.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/BaseOperation.java b/jstorm-client/src/main/java/storm/trident/operation/BaseOperation.java
deleted file mode 100644
index df6166d..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/BaseOperation.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package storm.trident.operation;
-
-import java.util.Map;
-
-public class BaseOperation implements Operation {
-
- @Override
- public void prepare(Map conf, TridentOperationContext context) {
- }
-
- @Override
- public void cleanup() {
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/CombinerAggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/CombinerAggregator.java b/jstorm-client/src/main/java/storm/trident/operation/CombinerAggregator.java
deleted file mode 100644
index 03933c9..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/CombinerAggregator.java
+++ /dev/null
@@ -1,12 +0,0 @@
-package storm.trident.operation;
-
-import java.io.Serializable;
-import storm.trident.tuple.TridentTuple;
-
-// doesn't manipulate tuples (lists of stuff) so that things like aggregating into
-// cassandra is cleaner (don't need lists everywhere, just store the single value there)
-public interface CombinerAggregator<T> extends Serializable {
- T init(TridentTuple tuple);
- T combine(T val1, T val2);
- T zero();
-}
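For context, a CombinerAggregator like the interface removed above is normally handed to the Trident stream API rather than called directly. The sketch below shows the usual word-count wiring, assuming a spout that emits a "word" field and the MemoryMapState testing helper; the stream id and field names are illustrative and are not part of this commit.

import backtype.storm.topology.IRichSpout;
import backtype.storm.tuple.Fields;
import storm.trident.TridentTopology;
import storm.trident.operation.builtin.Count;
import storm.trident.testing.MemoryMapState;

public class WordCountSketch {
    public static TridentTopology build(IRichSpout wordSpout) {
        TridentTopology topology = new TridentTopology();
        // group tuples by "word" and keep a running count per word in an in-memory state
        topology.newStream("words", wordSpout)
                .groupBy(new Fields("word"))
                .persistentAggregate(new MemoryMapState.Factory(), new Count(), new Fields("count"));
        return topology;
    }
}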
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/EachOperation.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/EachOperation.java b/jstorm-client/src/main/java/storm/trident/operation/EachOperation.java
deleted file mode 100644
index b56fe96..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/EachOperation.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package storm.trident.operation;
-
-public interface EachOperation extends Operation {
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/Filter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/Filter.java b/jstorm-client/src/main/java/storm/trident/operation/Filter.java
deleted file mode 100644
index ea7cbb6..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/Filter.java
+++ /dev/null
@@ -1,8 +0,0 @@
-package storm.trident.operation;
-
-import storm.trident.tuple.TridentTuple;
-
-
-public interface Filter extends EachOperation {
- boolean isKeep(TridentTuple tuple);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/Function.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/Function.java b/jstorm-client/src/main/java/storm/trident/operation/Function.java
deleted file mode 100644
index b58a29d..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/Function.java
+++ /dev/null
@@ -1,7 +0,0 @@
-package storm.trident.operation;
-
-import storm.trident.tuple.TridentTuple;
-
-public interface Function extends EachOperation {
- void execute(TridentTuple tuple, TridentCollector collector);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/GroupedMultiReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/GroupedMultiReducer.java b/jstorm-client/src/main/java/storm/trident/operation/GroupedMultiReducer.java
deleted file mode 100644
index 9223cf7..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/GroupedMultiReducer.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.operation;
-
-import java.io.Serializable;
-import java.util.Map;
-import storm.trident.tuple.TridentTuple;
-
-
-public interface GroupedMultiReducer<T> extends Serializable {
- void prepare(Map conf, TridentMultiReducerContext context);
- T init(TridentCollector collector, TridentTuple group);
- void execute(T state, int streamIndex, TridentTuple group, TridentTuple input, TridentCollector collector);
- void complete(T state, TridentTuple group, TridentCollector collector);
- void cleanup();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/MultiReducer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/MultiReducer.java b/jstorm-client/src/main/java/storm/trident/operation/MultiReducer.java
deleted file mode 100644
index 520f4b9..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/MultiReducer.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.operation;
-
-import java.io.Serializable;
-import java.util.Map;
-import storm.trident.tuple.TridentTuple;
-
-
-public interface MultiReducer<T> extends Serializable {
- void prepare(Map conf, TridentMultiReducerContext context);
- T init(TridentCollector collector);
- void execute(T state, int streamIndex, TridentTuple input, TridentCollector collector);
- void complete(T state, TridentCollector collector);
- void cleanup();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/Operation.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/Operation.java b/jstorm-client/src/main/java/storm/trident/operation/Operation.java
deleted file mode 100644
index f67281e..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/Operation.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.operation;
-
-import java.io.Serializable;
-import java.util.Map;
-
-public interface Operation extends Serializable {
- void prepare(Map conf, TridentOperationContext context);
- void cleanup();
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/ReducerAggregator.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/ReducerAggregator.java b/jstorm-client/src/main/java/storm/trident/operation/ReducerAggregator.java
deleted file mode 100644
index 3b4efca..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/ReducerAggregator.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.operation;
-
-import java.io.Serializable;
-import storm.trident.tuple.TridentTuple;
-
-public interface ReducerAggregator<T> extends Serializable {
- T init();
- T reduce(T curr, TridentTuple tuple);
-}
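A minimal implementation sketch of the ReducerAggregator contract above, assuming tuples whose first field is numeric; the class name and field position are illustrative.

import storm.trident.operation.ReducerAggregator;
import storm.trident.tuple.TridentTuple;

public class SumAsLong implements ReducerAggregator<Long> {
    @Override
    public Long init() {
        return 0L;                                      // identity value for the reduction
    }

    @Override
    public Long reduce(Long curr, TridentTuple tuple) {
        // fold the tuple's first field into the running total
        return curr + ((Number) tuple.getValue(0)).longValue();
    }
}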
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/TridentCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/TridentCollector.java b/jstorm-client/src/main/java/storm/trident/operation/TridentCollector.java
deleted file mode 100644
index b1a74d1..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/TridentCollector.java
+++ /dev/null
@@ -1,9 +0,0 @@
-package storm.trident.operation;
-
-import java.util.List;
-
-
-public interface TridentCollector {
- void emit(List<Object> values);
- void reportError(Throwable t);
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/TridentMultiReducerContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/TridentMultiReducerContext.java b/jstorm-client/src/main/java/storm/trident/operation/TridentMultiReducerContext.java
deleted file mode 100644
index fe0ff04..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/TridentMultiReducerContext.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package storm.trident.operation;
-
-import backtype.storm.tuple.Fields;
-import java.util.List;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-
-public class TridentMultiReducerContext {
- List<TridentTuple.Factory> _factories;
-
- public TridentMultiReducerContext(List<TridentTuple.Factory> factories) {
- _factories = factories;
- }
-
- public ProjectionFactory makeProjectionFactory(int streamIndex, Fields fields) {
- return new ProjectionFactory(_factories.get(streamIndex), fields);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/TridentOperationContext.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/TridentOperationContext.java b/jstorm-client/src/main/java/storm/trident/operation/TridentOperationContext.java
deleted file mode 100644
index 3693125..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/TridentOperationContext.java
+++ /dev/null
@@ -1,48 +0,0 @@
-package storm.trident.operation;
-
-import backtype.storm.metric.api.CombinedMetric;
-import backtype.storm.metric.api.ICombiner;
-import backtype.storm.metric.api.IMetric;
-import backtype.storm.metric.api.IReducer;
-import backtype.storm.metric.api.ReducedMetric;
-import backtype.storm.task.IMetricsContext;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.tuple.Fields;
-import storm.trident.tuple.TridentTuple;
-import storm.trident.tuple.TridentTupleView.ProjectionFactory;
-
-public class TridentOperationContext implements IMetricsContext{
- TridentTuple.Factory _factory;
- TopologyContext _topoContext;
-
- public TridentOperationContext(TopologyContext topoContext, TridentTuple.Factory factory) {
- _factory = factory;
- _topoContext = topoContext;
- }
-
- public TridentOperationContext(TridentOperationContext parent, TridentTuple.Factory factory) {
- this(parent._topoContext, factory);
- }
-
- public ProjectionFactory makeProjectionFactory(Fields fields) {
- return new ProjectionFactory(_factory, fields);
- }
-
- public int numPartitions() {
- return _topoContext.getComponentTasks(_topoContext.getThisComponentId()).size();
- }
-
- public int getPartitionIndex() {
- return _topoContext.getThisTaskIndex();
- }
-
- public <T extends IMetric> T registerMetric(String name, T metric, int timeBucketSizeInSecs) {
- return _topoContext.registerMetric(name, metric, timeBucketSizeInSecs);
- }
- public ReducedMetric registerMetric(String name, IReducer reducer, int timeBucketSizeInSecs) {
- return _topoContext.registerMetric(name, new ReducedMetric(reducer), timeBucketSizeInSecs);
- }
- public CombinedMetric registerMetric(String name, ICombiner combiner, int timeBucketSizeInSecs) {
- return _topoContext.registerMetric(name, new CombinedMetric(combiner), timeBucketSizeInSecs);
- }
-}
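To show how the registerMetric hooks above are typically used, here is a sketch of a pass-through function that counts what it emits. It assumes the stock CountMetric from backtype.storm.metric.api and a 60-second bucket; the class and metric names are illustrative.

import java.util.Map;
import backtype.storm.metric.api.CountMetric;
import backtype.storm.tuple.Values;
import storm.trident.operation.BaseFunction;
import storm.trident.operation.TridentCollector;
import storm.trident.operation.TridentOperationContext;
import storm.trident.tuple.TridentTuple;

public class PassThroughWithMetric extends BaseFunction {
    private transient CountMetric emitted;              // re-created per worker in prepare()

    @Override
    public void prepare(Map conf, TridentOperationContext context) {
        // the context forwards registration to the underlying TopologyContext
        emitted = context.registerMetric("emitted", new CountMetric(), 60);
    }

    @Override
    public void execute(TridentTuple tuple, TridentCollector collector) {
        collector.emit(new Values(tuple.getValue(0)));  // pass the first field through
        emitted.incr();
    }
}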
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/Count.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/Count.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/Count.java
deleted file mode 100644
index e40177e..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/Count.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package storm.trident.operation.builtin;
-
-import storm.trident.operation.CombinerAggregator;
-import storm.trident.tuple.TridentTuple;
-
-
-public class Count implements CombinerAggregator<Long> {
-
- @Override
- public Long init(TridentTuple tuple) {
- return 1L;
- }
-
- @Override
- public Long combine(Long val1, Long val2) {
- return val1 + val2;
- }
-
- @Override
- public Long zero() {
- return 0L;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/Debug.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/Debug.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/Debug.java
deleted file mode 100644
index 34e905c..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/Debug.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package storm.trident.operation.builtin;
-
-import storm.trident.operation.BaseFilter;
-import storm.trident.tuple.TridentTuple;
-
-public class Debug extends BaseFilter {
- private final String name;
-
- public Debug() {
- name = "DEBUG: ";
- }
-
- public Debug(String name) {
- this.name = "DEBUG(" + name + "): ";
- }
-
- @Override
- public boolean isKeep(TridentTuple tuple) {
- System.out.println(name + tuple.toString());
- return true;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/Equals.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/Equals.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/Equals.java
deleted file mode 100644
index c53cfdd..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/Equals.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package storm.trident.operation.builtin;
-
-import storm.trident.operation.BaseFilter;
-import storm.trident.tuple.TridentTuple;
-
-
-public class Equals extends BaseFilter {
-
- @Override
- public boolean isKeep(TridentTuple tuple) {
- for(int i=0; i<tuple.size()-1; i++) {
- Object o1 = tuple.getValue(i);
- Object o2 = tuple.getValue(i+1);
- if (o1 == null) {
- if (o2 != null) {
- return false;
- }
- }else if (o1.equals(o2) == false){
- return false;
- }
-
- }
- return true;
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/FilterNull.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/FilterNull.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/FilterNull.java
deleted file mode 100644
index bed2f1e..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/FilterNull.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package storm.trident.operation.builtin;
-
-import storm.trident.operation.BaseFilter;
-import storm.trident.tuple.TridentTuple;
-
-public class FilterNull extends BaseFilter {
- @Override
- public boolean isKeep(TridentTuple tuple) {
- for(Object o: tuple) {
- if(o==null) return false;
- }
- return true;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/storm/trident/operation/builtin/FirstN.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/storm/trident/operation/builtin/FirstN.java b/jstorm-client/src/main/java/storm/trident/operation/builtin/FirstN.java
deleted file mode 100644
index 412badd..0000000
--- a/jstorm-client/src/main/java/storm/trident/operation/builtin/FirstN.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package storm.trident.operation.builtin;
-
-import backtype.storm.tuple.Fields;
-import java.util.Comparator;
-import java.util.PriorityQueue;
-import storm.trident.Stream;
-import storm.trident.operation.Aggregator;
-import storm.trident.operation.Assembly;
-import storm.trident.operation.BaseAggregator;
-import storm.trident.operation.TridentCollector;
-import storm.trident.tuple.TridentTuple;
-
-
-public class FirstN implements Assembly {
-
- Aggregator _agg;
-
- public FirstN(int n, String sortField) {
- this(n, sortField, false);
- }
-
- public FirstN(int n, String sortField, boolean reverse) {
- if(sortField!=null) {
- _agg = new FirstNSortedAgg(n, sortField, reverse);
- } else {
- _agg = new FirstNAgg(n);
- }
- }
-
- @Override
- public Stream apply(Stream input) {
- Fields outputFields = input.getOutputFields();
- return input.partitionAggregate(outputFields, _agg, outputFields)
- .global()
- .partitionAggregate(outputFields, _agg, outputFields);
- }
-
- public static class FirstNAgg extends BaseAggregator<FirstNAgg.State> {
- int _n;
-
- public FirstNAgg(int n) {
- _n = n;
- }
-
- static class State {
- int emitted = 0;
- }
-
- @Override
- public State init(Object batchId, TridentCollector collector) {
- return new State();
- }
-
- @Override
- public void aggregate(State val, TridentTuple tuple, TridentCollector collector) {
- if(val.emitted < _n) {
- collector.emit(tuple);
- val.emitted++;
- }
- }
-
- @Override
- public void complete(State val, TridentCollector collector) {
- }
-
- }
-
- public static class FirstNSortedAgg extends BaseAggregator<PriorityQueue> {
-
- int _n;
- String _sortField;
- boolean _reverse;
-
- public FirstNSortedAgg(int n, String sortField, boolean reverse) {
- _n = n;
- _sortField = sortField;
- _reverse = reverse;
- }
-
- @Override
- public PriorityQueue init(Object batchId, TridentCollector collector) {
- return new PriorityQueue(_n, new Comparator<TridentTuple>() {
- @Override
- public int compare(TridentTuple t1, TridentTuple t2) {
- Comparable c1 = (Comparable) t1.getValueByField(_sortField);
- Comparable c2 = (Comparable) t2.getValueByField(_sortField);
- int ret = c1.compareTo(c2);
- if(_reverse) ret *= -1;
- return ret;
- }
- });
- }
-
- @Override
- public void aggregate(PriorityQueue state, TridentTuple tuple, TridentCollector collector) {
- state.add(tuple);
- }
-
- @Override
- public void complete(PriorityQueue val, TridentCollector collector) {
- int total = val.size();
- for(int i=0; i<_n && i < total; i++) {
- TridentTuple t = (TridentTuple) val.remove();
- collector.emit(t);
- }
- }
- }
-}
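For reference, an Assembly such as FirstN above is attached to a stream with Stream.applyAssembly; a minimal sketch follows, with the field name and cutoff chosen only for illustration.

import storm.trident.Stream;
import storm.trident.operation.builtin.FirstN;

public class TopNSketch {
    // keep only the 10 tuples with the largest "count" values (reverse = true keeps the largest)
    public static Stream topTenByCount(Stream countedStream) {
        return countedStream.applyAssembly(new FirstN(10, "count", true));
    }
}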
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/StormSubmitter.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/StormSubmitter.java b/jstorm-client/src/main/java/backtype/storm/StormSubmitter.java
deleted file mode 100644
index 25a9b4c..0000000
--- a/jstorm-client/src/main/java/backtype/storm/StormSubmitter.java
+++ /dev/null
@@ -1,375 +0,0 @@
-package backtype.storm;
-
-import java.io.File;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.thrift7.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.generated.AlreadyAliveException;
-import backtype.storm.generated.ClusterSummary;
-import backtype.storm.generated.InvalidTopologyException;
-import backtype.storm.generated.Nimbus;
-import backtype.storm.generated.StormTopology;
-import backtype.storm.generated.SubmitOptions;
-import backtype.storm.generated.TopologyAssignException;
-import backtype.storm.generated.TopologySummary;
-import backtype.storm.utils.BufferFileInputStream;
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Use this class to submit topologies to run on the Storm cluster. You should
- * run your program with the "storm jar" command from the command-line, and then
- * use this class to submit your topologies.
- */
-public class StormSubmitter {
- public static Logger LOG = LoggerFactory.getLogger(StormSubmitter.class);
-
- private static Nimbus.Iface localNimbus = null;
-
- public static void setLocalNimbus(Nimbus.Iface localNimbusHandler) {
- StormSubmitter.localNimbus = localNimbusHandler;
- }
-
- /**
- * Submits a topology to run on the cluster. A topology runs forever or
- * until explicitly killed.
- *
- *
- * @param name
- * the name of the storm.
- * @param stormConf
- * the topology-specific configuration. See {@link Config}.
- * @param topology
- * the processing to execute.
- * @throws AlreadyAliveException
- * if a topology with this name is already running
- * @throws InvalidTopologyException
- * if an invalid topology was submitted
- */
- public static void submitTopology(String name, Map stormConf,
- StormTopology topology) throws AlreadyAliveException,
- InvalidTopologyException {
- submitTopology(name, stormConf, topology, null);
- }
-
- public static void submitTopology(String name, Map stormConf,
- StormTopology topology, SubmitOptions opts, List<File> jarFiles)
- throws AlreadyAliveException, InvalidTopologyException {
- if (jarFiles == null) {
- jarFiles = new ArrayList<File>();
- }
- Map<String, String> jars = new HashMap<String, String>(jarFiles.size());
- List<String> names = new ArrayList<String>(jarFiles.size());
-
- for (File f : jarFiles) {
- if (!f.exists()) {
-				LOG.info(f.getName() + " does not exist: "
- + f.getAbsolutePath());
- continue;
- }
- jars.put(f.getName(), f.getAbsolutePath());
- names.add(f.getName());
- }
- LOG.info("Files: " + names + " will be loaded");
- stormConf.put(GenericOptionsParser.TOPOLOGY_LIB_PATH, jars);
- stormConf.put(GenericOptionsParser.TOPOLOGY_LIB_NAME, names);
- submitTopology(name, stormConf, topology, opts);
- }
-
- public static void submitTopology(String name, Map stormConf,
- StormTopology topology, SubmitOptions opts,
- ProgressListener listener) throws AlreadyAliveException,
- InvalidTopologyException {
- submitTopology(name, stormConf, topology, opts);
- }
-
- /**
- * Submits a topology to run on the cluster. A topology runs forever or
- * until explicitly killed.
- *
- *
- * @param name
- * the name of the storm.
- * @param stormConf
- * the topology-specific configuration. See {@link Config}.
- * @param topology
- * the processing to execute.
- * @param options
- * to manipulate the starting of the topology
- * @throws AlreadyAliveException
- * if a topology with this name is already running
- * @throws InvalidTopologyException
- * if an invalid topology was submitted
- */
- public static void submitTopology(String name, Map stormConf,
- StormTopology topology, SubmitOptions opts)
- throws AlreadyAliveException, InvalidTopologyException {
- if (!Utils.isValidConf(stormConf)) {
- throw new IllegalArgumentException(
- "Storm conf is not valid. Must be json-serializable");
- }
- stormConf = new HashMap(stormConf);
- stormConf.putAll(Utils.readCommandLineOpts());
- Map conf = Utils.readStormConfig();
- conf.putAll(stormConf);
- putUserInfo(conf, stormConf);
- try {
- String serConf = Utils.to_json(stormConf);
- if (localNimbus != null) {
- LOG.info("Submitting topology " + name + " in local mode");
- localNimbus.submitTopology(name, null, serConf, topology);
- } else {
- NimbusClient client = NimbusClient.getConfiguredClient(conf);
- if (topologyNameExists(conf, name)) {
- throw new RuntimeException("Topology with name `" + name
- + "` already exists on cluster");
- }
-
- submitJar(conf);
- try {
- LOG.info("Submitting topology " + name
- + " in distributed mode with conf " + serConf);
- if (opts != null) {
- client.getClient().submitTopologyWithOpts(name, path,
- serConf, topology, opts);
- } else {
- // this is for backwards compatibility
- client.getClient().submitTopology(name, path, serConf,
- topology);
- }
- } finally {
- client.close();
- }
- }
- LOG.info("Finished submitting topology: " + name);
- } catch (InvalidTopologyException e) {
- LOG.warn("Topology submission exception", e);
- throw e;
- } catch (AlreadyAliveException e) {
- LOG.warn("Topology already alive exception", e);
- throw e;
- } catch (TopologyAssignException e) {
- LOG.warn("Failed to assign " + e.get_msg(), e);
- throw new RuntimeException(e);
- } catch (TException e) {
- LOG.warn("Failed to assign ", e);
- throw new RuntimeException(e);
- }
- }
-
- /**
- * Submits a topology to run on the cluster with a progress bar. A topology
- * runs forever or until explicitly killed.
- *
- *
- * @param name
- * the name of the storm.
- * @param stormConf
- * the topology-specific configuration. See {@link Config}.
- * @param topology
- * the processing to execute.
- * @throws AlreadyAliveException
- * if a topology with this name is already running
- * @throws InvalidTopologyException
- * if an invalid topology was submitted
- * @throws TopologyAssignException
- */
-
- public static void submitTopologyWithProgressBar(String name,
- Map stormConf, StormTopology topology)
- throws AlreadyAliveException, InvalidTopologyException {
- submitTopologyWithProgressBar(name, stormConf, topology, null);
- }
-
- /**
- * Submits a topology to run on the cluster with a progress bar. A topology
- * runs forever or until explicitly killed.
- *
- *
- * @param name
- * the name of the storm.
- * @param stormConf
- * the topology-specific configuration. See {@link Config}.
- * @param topology
- * the processing to execute.
- * @param opts
- * to manipulate the starting of the topology
- * @throws AlreadyAliveException
- * if a topology with this name is already running
- * @throws InvalidTopologyException
- * if an invalid topology was submitted
- * @throws TopologyAssignException
- */
-
- public static void submitTopologyWithProgressBar(String name,
- Map stormConf, StormTopology topology, SubmitOptions opts)
- throws AlreadyAliveException, InvalidTopologyException {
-
- /**
- * remove progress bar in jstorm
- */
- submitTopology(name, stormConf, topology, opts);
- }
-
- private static boolean topologyNameExists(Map conf, String name) {
- NimbusClient client = NimbusClient.getConfiguredClient(conf);
- try {
- ClusterSummary summary = client.getClient().getClusterInfo();
- for (TopologySummary s : summary.get_topologies()) {
- if (s.get_name().equals(name)) {
- return true;
- }
- }
- return false;
-
- } catch (Exception e) {
- throw new RuntimeException(e);
- } finally {
- client.close();
- }
- }
-
- private static String submittedJar = null;
- private static String path = null;
-
- private static void submitJar(Map conf) {
- if (submittedJar == null) {
- NimbusClient client = NimbusClient.getConfiguredClient(conf);
- try {
- LOG.info("Jar not uploaded to master yet. Submitting jar...");
- String localJar = System.getProperty("storm.jar");
- path = client.getClient().beginFileUpload();
- String[] pathCache = path.split("/");
- String uploadLocation = path + "/stormjar-"
- + pathCache[pathCache.length - 1] + ".jar";
- List<String> lib = (List<String>) conf
- .get(GenericOptionsParser.TOPOLOGY_LIB_NAME);
- Map<String, String> libPath = (Map<String, String>) conf
- .get(GenericOptionsParser.TOPOLOGY_LIB_PATH);
- if (lib != null && lib.size() != 0) {
- for (String libName : lib) {
- String jarPath = path + "/lib/" + libName;
- client.getClient().beginLibUpload(jarPath);
- submitJar(conf, libPath.get(libName), jarPath, client);
- }
-
- } else {
- if (localJar == null) {
- // no lib, no client jar
- throw new RuntimeException("No client app jar, please upload it");
- }
- }
-
- if (localJar != null) {
- submittedJar = submitJar(conf, localJar,
- uploadLocation, client);
- }else {
- // no client jar, but with lib jar
- client.getClient().finishFileUpload(uploadLocation);
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- } finally {
- client.close();
- }
- } else {
- LOG.info("Jar already uploaded to master. Not submitting jar.");
- }
- }
-
- public static String submitJar(Map conf, String localJar,
- String uploadLocation, NimbusClient client) {
- if (localJar == null) {
- throw new RuntimeException(
- "Must submit topologies using the 'storm' client script so that StormSubmitter knows which jar to upload.");
- }
-
- try {
-
- LOG.info("Uploading topology jar " + localJar
- + " to assigned location: " + uploadLocation);
- int bufferSize = 512 * 1024;
- Object maxBufSizeObject = conf
- .get(Config.NIMBUS_THRIFT_MAX_BUFFER_SIZE);
- if (maxBufSizeObject != null) {
- bufferSize = Utils.getInt(maxBufSizeObject) / 2;
- }
-
- BufferFileInputStream is = new BufferFileInputStream(localJar,
- bufferSize);
- while (true) {
- byte[] toSubmit = is.read();
- if (toSubmit.length == 0)
- break;
- client.getClient().uploadChunk(uploadLocation,
- ByteBuffer.wrap(toSubmit));
- }
- client.getClient().finishFileUpload(uploadLocation);
- LOG.info("Successfully uploaded topology jar to assigned location: "
- + uploadLocation);
- return uploadLocation;
- } catch (Exception e) {
- throw new RuntimeException(e);
- } finally {
-
- }
- }
-
- private static void putUserInfo(Map conf, Map stormConf) {
- stormConf.put("user.group", conf.get("user.group"));
- stormConf.put("user.name", conf.get("user.name"));
- stormConf.put("user.password", conf.get("user.password"));
- }
-
- /**
- * Interface use to track progress of file upload
- */
- public interface ProgressListener {
- /**
- * called before file is uploaded
- *
- * @param srcFile
- * - jar file to be uploaded
- * @param targetFile
- * - destination file
- * @param totalBytes
- * - total number of bytes of the file
- */
- public void onStart(String srcFile, String targetFile, long totalBytes);
-
- /**
- * called whenever a chunk of bytes is uploaded
- *
- * @param srcFile
- * - jar file to be uploaded
- * @param targetFile
- * - destination file
- * @param bytesUploaded
- * - number of bytes transferred so far
- * @param totalBytes
- * - total number of bytes of the file
- */
- public void onProgress(String srcFile, String targetFile,
- long bytesUploaded, long totalBytes);
-
- /**
- * called when the file is uploaded
- *
- * @param srcFile
- * - jar file to be uploaded
- * @param targetFile
- * - destination file
- * @param totalBytes
- * - total number of bytes of the file
- */
- public void onCompleted(String srcFile, String targetFile,
- long totalBytes);
- }
-}
\ No newline at end of file
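To illustrate the submission path described in the javadoc above, here is a minimal sketch of a driver class (spout and bolt wiring elided; the topology name and worker count are illustrative). It assumes the program is launched through the "storm jar" command so the client jar can be found and uploaded.

import backtype.storm.Config;
import backtype.storm.StormSubmitter;
import backtype.storm.generated.StormTopology;
import backtype.storm.topology.TopologyBuilder;

public class SubmitSketch {
    public static void main(String[] args) throws Exception {
        TopologyBuilder builder = new TopologyBuilder();
        // builder.setSpout(...) and builder.setBolt(...) calls omitted for brevity
        StormTopology topology = builder.createTopology();

        Config conf = new Config();
        conf.setNumWorkers(2);                           // any JSON-serializable settings

        StormSubmitter.submitTopology("demo-topology", conf, topology);
    }
}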
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/Tool.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/Tool.java b/jstorm-client/src/main/java/backtype/storm/Tool.java
deleted file mode 100644
index 5fa7ccc..0000000
--- a/jstorm-client/src/main/java/backtype/storm/Tool.java
+++ /dev/null
@@ -1,47 +0,0 @@
-package backtype.storm;
-
-/**
- * A tool abstract class that supports handling of generic
- * command-line options.
- *
- * <p>Here is how a typical <code>Tool</code> is implemented:</p>
- * <p><blockquote><pre>
- * public class TopologyApp extends Tool {
- * {@literal @}Override
- * public int run(String[] args) throws Exception {
- * // Config processed by ToolRunner
- * Config conf = getConf();
- *
- * // Other setups go here
- * String name = "topology";
- * StormTopology topology = buildTopology(args);
- * StormSubmitter.submitTopology(name, conf, topology);
- * return 0;
- * }
- *
- * StormTopology buildTopology(String[] args) { ... }
- *
- * public static void main(String[] args) throws Exception {
- * // Use ToolRunner to handle generic command-line options
- * ToolRunner.run(new TopologyApp(), args);
- * }
- * }
- * </pre></blockquote></p>
- *
- * @see GenericOptionsParser
- * @see ToolRunner
- */
-
-public abstract class Tool {
- Config config;
-
- public abstract int run(String[] args) throws Exception;
-
- public Config getConf() {
- return config;
- }
-
- public void setConf(Config config) {
- this.config = config;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/ToolRunner.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/ToolRunner.java b/jstorm-client/src/main/java/backtype/storm/ToolRunner.java
deleted file mode 100644
index 30940da..0000000
--- a/jstorm-client/src/main/java/backtype/storm/ToolRunner.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package backtype.storm;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import org.apache.commons.cli.ParseException;
-
-import backtype.storm.utils.Utils;
-
-/**
- * A utility to help run {@link Tool}s
- *
- * <p><code>ToolRunner</code> can be used to run classes extending the
- * <code>Tool</code> abstract class. It works in conjunction with
- * {@link GenericOptionsParser} to parse the <a
- * href="{@docRoot}/backtype/storm/GenericOptionsParser.html#GenericOptions">
- * generic storm command line arguments</a> and modifies the
- * <code>Config</code> of the <code>Tool</code>. The
- * application-specific options are passed along without being
- * modified.
- *
- * @see Tool
- * @see GenericOptionsParser
- */
-
-public class ToolRunner {
- static final Logger LOG = LoggerFactory.getLogger(ToolRunner.class);
-
- public static void run(Tool tool, String[] args) {
- run(tool.getConf(), tool, args);
- }
-
- public static void run(Config conf, Tool tool, String[] args) {
- try {
- if (conf == null) {
- conf = new Config();
- conf.putAll(Utils.readStormConfig());
- }
-
- GenericOptionsParser parser = new GenericOptionsParser(conf, args);
- tool.setConf(conf);
-
- System.exit(tool.run(parser.getRemainingArgs()));
- } catch (ParseException e) {
- LOG.error("Error parsing generic options: {}", e.getMessage());
- GenericOptionsParser.printGenericCommandUsage(System.err);
- System.exit(2);
- } catch (Exception e) {
- LOG.error("Error running tool", e);
- System.exit(1);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/clojure/ClojureBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/clojure/ClojureBolt.java b/jstorm-client/src/main/java/backtype/storm/clojure/ClojureBolt.java
deleted file mode 100644
index bd3873f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/clojure/ClojureBolt.java
+++ /dev/null
@@ -1,108 +0,0 @@
-package backtype.storm.clojure;
-
-import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.task.IBolt;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-import clojure.lang.IFn;
-import clojure.lang.PersistentArrayMap;
-import clojure.lang.Keyword;
-import clojure.lang.Symbol;
-import clojure.lang.RT;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-public class ClojureBolt implements IRichBolt, FinishedCallback {
- Map<String, StreamInfo> _fields;
- List<String> _fnSpec;
- List<String> _confSpec;
- List<Object> _params;
-
- IBolt _bolt;
-
- public ClojureBolt(List fnSpec, List confSpec, List<Object> params,
- Map<String, StreamInfo> fields) {
- _fnSpec = fnSpec;
- _confSpec = confSpec;
- _params = params;
- _fields = fields;
- }
-
- @Override
- public void prepare(final Map stormConf, final TopologyContext context,
- final OutputCollector collector) {
- IFn hof = Utils.loadClojureFn(_fnSpec.get(0), _fnSpec.get(1));
- try {
- IFn preparer = (IFn) hof.applyTo(RT.seq(_params));
- final Map<Keyword, Object> collectorMap = new PersistentArrayMap(
- new Object[] {
- Keyword.intern(Symbol.create("output-collector")),
- collector,
- Keyword.intern(Symbol.create("context")), context });
- List<Object> args = new ArrayList<Object>() {
- {
- add(stormConf);
- add(context);
- add(collectorMap);
- }
- };
-
- _bolt = (IBolt) preparer.applyTo(RT.seq(args));
- // this is kind of unnecessary for clojure
- try {
- _bolt.prepare(stormConf, context, collector);
- } catch (AbstractMethodError ame) {
-
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void execute(Tuple input) {
- _bolt.execute(input);
- }
-
- @Override
- public void cleanup() {
- try {
- _bolt.cleanup();
- } catch (AbstractMethodError ame) {
-
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- for (String stream : _fields.keySet()) {
- StreamInfo info = _fields.get(stream);
- declarer.declareStream(stream, info.is_direct(),
- new Fields(info.get_output_fields()));
- }
- }
-
- @Override
- public void finishedId(Object id) {
- if (_bolt instanceof FinishedCallback) {
- ((FinishedCallback) _bolt).finishedId(id);
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- IFn hof = Utils.loadClojureFn(_confSpec.get(0), _confSpec.get(1));
- try {
- return (Map) hof.applyTo(RT.seq(_params));
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/clojure/ClojureSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/clojure/ClojureSpout.java b/jstorm-client/src/main/java/backtype/storm/clojure/ClojureSpout.java
deleted file mode 100644
index 3606252..0000000
--- a/jstorm-client/src/main/java/backtype/storm/clojure/ClojureSpout.java
+++ /dev/null
@@ -1,142 +0,0 @@
-package backtype.storm.clojure;
-
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.spout.ISpout;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.utils.Utils;
-import clojure.lang.IFn;
-import clojure.lang.PersistentArrayMap;
-import clojure.lang.Keyword;
-import clojure.lang.Symbol;
-import clojure.lang.RT;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-
-public class ClojureSpout implements IRichSpout {
- Map<String, StreamInfo> _fields;
- List<String> _fnSpec;
- List<String> _confSpec;
- List<Object> _params;
-
- ISpout _spout;
-
- public ClojureSpout(List fnSpec, List confSpec, List<Object> params,
- Map<String, StreamInfo> fields) {
- _fnSpec = fnSpec;
- _confSpec = confSpec;
- _params = params;
- _fields = fields;
- }
-
- @Override
- public void open(final Map conf, final TopologyContext context,
- final SpoutOutputCollector collector) {
- IFn hof = Utils.loadClojureFn(_fnSpec.get(0), _fnSpec.get(1));
- try {
- IFn preparer = (IFn) hof.applyTo(RT.seq(_params));
- final Map<Keyword, Object> collectorMap = new PersistentArrayMap(
- new Object[] {
- Keyword.intern(Symbol.create("output-collector")),
- collector,
- Keyword.intern(Symbol.create("context")), context });
- List<Object> args = new ArrayList<Object>() {
- {
- add(conf);
- add(context);
- add(collectorMap);
- }
- };
-
- _spout = (ISpout) preparer.applyTo(RT.seq(args));
- // this is kind of unnecessary for clojure
- try {
- _spout.open(conf, context, collector);
- } catch (AbstractMethodError ame) {
-
- }
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void close() {
- try {
- _spout.close();
- } catch (AbstractMethodError ame) {
-
- }
- }
-
- @Override
- public void nextTuple() {
- try {
- _spout.nextTuple();
- } catch (AbstractMethodError ame) {
-
- }
-
- }
-
- @Override
- public void ack(Object msgId) {
- try {
- _spout.ack(msgId);
- } catch (AbstractMethodError ame) {
-
- }
-
- }
-
- @Override
- public void fail(Object msgId) {
- try {
- _spout.fail(msgId);
- } catch (AbstractMethodError ame) {
-
- }
-
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- for (String stream : _fields.keySet()) {
- StreamInfo info = _fields.get(stream);
- declarer.declareStream(stream, info.is_direct(),
- new Fields(info.get_output_fields()));
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- IFn hof = Utils.loadClojureFn(_confSpec.get(0), _confSpec.get(1));
- try {
- return (Map) hof.applyTo(RT.seq(_params));
- } catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- @Override
- public void activate() {
- try {
- _spout.activate();
- } catch (AbstractMethodError ame) {
-
- }
- }
-
- @Override
- public void deactivate() {
- try {
- _spout.deactivate();
- } catch (AbstractMethodError ame) {
-
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/clojure/RichShellBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/clojure/RichShellBolt.java b/jstorm-client/src/main/java/backtype/storm/clojure/RichShellBolt.java
deleted file mode 100644
index 9289448..0000000
--- a/jstorm-client/src/main/java/backtype/storm/clojure/RichShellBolt.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package backtype.storm.clojure;
-
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.task.ShellBolt;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-
-public class RichShellBolt extends ShellBolt implements IRichBolt {
- private Map<String, StreamInfo> _outputs;
-
- public RichShellBolt(String[] command, Map<String, StreamInfo> outputs) {
- super(command);
- _outputs = outputs;
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- for (String stream : _outputs.keySet()) {
- StreamInfo def = _outputs.get(stream);
- if (def.is_direct()) {
- declarer.declareStream(stream, true,
- new Fields(def.get_output_fields()));
- } else {
- declarer.declareStream(stream,
- new Fields(def.get_output_fields()));
- }
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/clojure/RichShellSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/clojure/RichShellSpout.java b/jstorm-client/src/main/java/backtype/storm/clojure/RichShellSpout.java
deleted file mode 100644
index 98763a5..0000000
--- a/jstorm-client/src/main/java/backtype/storm/clojure/RichShellSpout.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package backtype.storm.clojure;
-
-import backtype.storm.generated.StreamInfo;
-import backtype.storm.spout.ShellSpout;
-import backtype.storm.topology.IRichSpout;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import java.util.Map;
-
-public class RichShellSpout extends ShellSpout implements IRichSpout {
- private Map<String, StreamInfo> _outputs;
-
- public RichShellSpout(String[] command, Map<String, StreamInfo> outputs) {
- super(command);
- _outputs = outputs;
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- for (String stream : _outputs.keySet()) {
- StreamInfo def = _outputs.get(stream);
- if (def.is_direct()) {
- declarer.declareStream(stream, true,
- new Fields(def.get_output_fields()));
- } else {
- declarer.declareStream(stream,
- new Fields(def.get_output_fields()));
- }
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return null;
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/activate.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/activate.java b/jstorm-client/src/main/java/backtype/storm/command/activate.java
deleted file mode 100644
index ed8c33a..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/activate.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package backtype.storm.command;
-
-import java.security.InvalidParameterException;
-import java.util.Map;
-
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Activate topology
- *
- * @author longda
- *
- */
-public class activate {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length == 0) {
- throw new InvalidParameterException("Should input topology name");
- }
-
- String topologyName = args[0];
-
- NimbusClient client = null;
- try {
-
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- client.getClient().activate(topologyName);
-
- System.out.println("Successfully submit command activate "
- + topologyName);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/config_value.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/config_value.java b/jstorm-client/src/main/java/backtype/storm/command/config_value.java
deleted file mode 100644
index 90a3ed0..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/config_value.java
+++ /dev/null
@@ -1,33 +0,0 @@
-package backtype.storm.command;
-
-import java.security.InvalidParameterException;
-import java.util.Map;
-
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Get configuration
- *
- * @author longda
- *
- */
-public class config_value {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length == 0) {
- throw new InvalidParameterException("Should input key name");
- }
-
- String key = args[0];
-
- Map conf = Utils.readStormConfig();
-
- System.out.print("VALUE: " + String.valueOf(conf.get(key)));
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/deactivate.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/deactivate.java b/jstorm-client/src/main/java/backtype/storm/command/deactivate.java
deleted file mode 100644
index 845f456..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/deactivate.java
+++ /dev/null
@@ -1,49 +0,0 @@
-package backtype.storm.command;
-
-import java.security.InvalidParameterException;
-import java.util.Map;
-
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Deactivate topology
- *
- * @author longda
- *
- */
-public class deactivate {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length == 0) {
- throw new InvalidParameterException("Should input topology name");
- }
-
- String topologyName = args[0];
-
- NimbusClient client = null;
- try {
-
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- client.getClient().deactivate(topologyName);
-
- System.out.println("Successfully submit command deactivate "
- + topologyName);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/kill_topology.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/kill_topology.java b/jstorm-client/src/main/java/backtype/storm/command/kill_topology.java
deleted file mode 100644
index 0c950cb..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/kill_topology.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package backtype.storm.command;
-
-import java.security.InvalidParameterException;
-import java.util.Map;
-
-import backtype.storm.generated.KillOptions;
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Kill topology
- *
- * @author longda
- *
- */
-public class kill_topology {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length == 0) {
- throw new InvalidParameterException("Should input topology name");
- }
-
- String topologyName = args[0];
-
- NimbusClient client = null;
- try {
-
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- if (args.length == 1) {
-
- client.getClient().killTopology(topologyName);
- } else {
- int delaySeconds = Integer.parseInt(args[1]);
-
- KillOptions options = new KillOptions();
- options.set_wait_secs(delaySeconds);
-
- client.getClient().killTopologyWithOpts(topologyName, options);
-
- }
-
- System.out.println("Successfully submit command kill "
- + topologyName);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/list.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/list.java b/jstorm-client/src/main/java/backtype/storm/command/list.java
deleted file mode 100644
index b0e40bc..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/list.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package backtype.storm.command;
-
-import java.util.Map;
-
-import org.apache.commons.lang.StringUtils;
-
-import backtype.storm.generated.ClusterSummary;
-import backtype.storm.generated.TopologyInfo;
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * List topologies
- *
- * @author longda
- *
- */
-public class list {
-
-
-
-
- /**
- * @param args
- */
- public static void main(String[] args) {
-
- NimbusClient client = null;
- try {
-
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- if (args.length > 0 && StringUtils.isBlank(args[0]) == false) {
- String topologyName = args[0];
- TopologyInfo info = client.getClient().getTopologyInfoByName(topologyName);
-
- System.out.println("Successfully get topology info \n"
- + Utils.toPrettyJsonString(info));
- }else {
- ClusterSummary clusterSummary = client.getClient().getClusterInfo();
-
- System.out.println("Successfully get cluster info \n"
- + Utils.toPrettyJsonString(clusterSummary));
- }
-
-
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/metrics_monitor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/metrics_monitor.java b/jstorm-client/src/main/java/backtype/storm/command/metrics_monitor.java
deleted file mode 100644
index bb339d4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/metrics_monitor.java
+++ /dev/null
@@ -1,56 +0,0 @@
-package backtype.storm.command;
-
-import java.util.Map;
-import java.security.InvalidParameterException;
-
-import backtype.storm.generated.MonitorOptions;
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Monitor topology
- *
- * @author Basti
- *
- */
-public class metrics_monitor {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length <= 1) {
- throw new InvalidParameterException("Should input topology name and enable flag");
- }
-
- String topologyName = args[0];
-
- NimbusClient client = null;
- try {
-
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- boolean isEnable = Boolean.valueOf(args[1]).booleanValue();
-
- MonitorOptions options = new MonitorOptions();
- options.set_isEnable(isEnable);
-
- client.getClient().metricMonitor(topologyName, options);
-
- String str = (isEnable) ? "enable" : "disable";
- System.out.println("Successfully submit command to " + str
- + " the monitor of " + topologyName);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/rebalance.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/rebalance.java b/jstorm-client/src/main/java/backtype/storm/command/rebalance.java
deleted file mode 100644
index 72c3f26..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/rebalance.java
+++ /dev/null
@@ -1,60 +0,0 @@
-package backtype.storm.command;
-
-import java.security.InvalidParameterException;
-import java.util.Map;
-
-import backtype.storm.generated.RebalanceOptions;
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Rebalance topology
- *
- * @author longda
- *
- */
-public class rebalance {
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length == 0) {
- throw new InvalidParameterException("Should input topology name");
- }
-
- String topologyName = args[0];
-
- NimbusClient client = null;
- try {
-
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- if (args.length == 1) {
-
- client.getClient().rebalance(topologyName, null);
- } else {
- int delaySeconds = Integer.parseInt(args[1]);
-
- RebalanceOptions options = new RebalanceOptions();
- options.set_wait_secs(delaySeconds);
-
- client.getClient().rebalance(topologyName, options);
- }
-
- System.out.println("Successfully submit command rebalance "
- + topologyName);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/command/restart.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/command/restart.java b/jstorm-client/src/main/java/backtype/storm/command/restart.java
deleted file mode 100644
index 1eb577d..0000000
--- a/jstorm-client/src/main/java/backtype/storm/command/restart.java
+++ /dev/null
@@ -1,120 +0,0 @@
-package backtype.storm.command;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.InputStream;
-import java.security.InvalidParameterException;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Properties;
-
-import org.yaml.snakeyaml.Yaml;
-
-import backtype.storm.utils.NimbusClient;
-import backtype.storm.utils.Utils;
-
-/**
- * Restart topology
- *
- * @author basti
- *
- */
-public class restart {
- private static Map LoadProperty(String prop) {
- Map ret = new HashMap<Object, Object>();
- Properties properties = new Properties();
-
- try {
- InputStream stream = new FileInputStream(prop);
- properties.load(stream);
- if (properties.size() == 0) {
- System.out.println("WARN: Config file is empty");
- return null;
- } else {
- ret.putAll(properties);
- }
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + prop);
- throw new RuntimeException(e.getMessage());
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException(e1.getMessage());
- }
-
- return ret;
- }
-
- private static Map LoadYaml(String confPath) {
- Map ret = new HashMap<Object, Object>();
- Yaml yaml = new Yaml();
-
- try {
- InputStream stream = new FileInputStream(confPath);
- ret = (Map) yaml.load(stream);
- if (ret == null || ret.isEmpty()) {
- System.out.println("WARN: Config file is empty");
- return null;
- }
- } catch (FileNotFoundException e) {
- System.out.println("No such file " + confPath);
- throw new RuntimeException("No config file");
- } catch (Exception e1) {
- e1.printStackTrace();
- throw new RuntimeException("Failed to read config file");
- }
-
- return ret;
- }
-
- private static Map LoadConf(String arg) {
- Map ret = null;
- if (arg.endsWith("yaml")) {
- ret = LoadYaml(arg);
- } else {
- ret = LoadProperty(arg);
- }
- return ret;
- }
-
- /**
- * @param args
- */
- public static void main(String[] args) {
- // TODO Auto-generated method stub
- if (args == null || args.length == 0) {
- throw new InvalidParameterException("Should input topology name");
- }
-
- String topologyName = args[0];
-
- NimbusClient client = null;
- try {
- Map conf = Utils.readStormConfig();
- client = NimbusClient.getConfiguredClient(conf);
-
- System.out.println("It will take 15 ~ 100 seconds to restart, please wait patiently\n");
-
- if (args.length == 1) {
- client.getClient().restart(topologyName, null);
- } else {
- Map loadConf = LoadConf(args[1]);
- String jsonConf = Utils.to_json(loadConf);
- System.out.println("New configuration:\n" + jsonConf);
-
- client.getClient().restart(topologyName, jsonConf);
- }
-
- System.out.println("Successfully submitted restart command for "
- + topologyName);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- e.printStackTrace();
- throw new RuntimeException(e);
- } finally {
- if (client != null) {
- client.close();
- }
- }
- }
-
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java b/jstorm-client/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java
deleted file mode 100644
index 5502975..0000000
--- a/jstorm-client/src/main/java/backtype/storm/coordination/BatchBoltExecutor.java
+++ /dev/null
@@ -1,92 +0,0 @@
-package backtype.storm.coordination;
-
-import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
-import backtype.storm.coordination.CoordinatedBolt.TimeoutCallback;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.FailedException;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.Utils;
-import java.util.HashMap;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class BatchBoltExecutor implements IRichBolt, FinishedCallback,
- TimeoutCallback {
- public static Logger LOG = LoggerFactory.getLogger(BatchBoltExecutor.class);
-
- byte[] _boltSer;
- Map<Object, IBatchBolt> _openTransactions;
- Map _conf;
- TopologyContext _context;
- BatchOutputCollectorImpl _collector;
-
- public BatchBoltExecutor(IBatchBolt bolt) {
- _boltSer = Utils.serialize(bolt);
- }
-
- @Override
- public void prepare(Map conf, TopologyContext context,
- OutputCollector collector) {
- _conf = conf;
- _context = context;
- _collector = new BatchOutputCollectorImpl(collector);
- _openTransactions = new HashMap<Object, IBatchBolt>();
- }
-
- @Override
- public void execute(Tuple input) {
- Object id = input.getValue(0);
- IBatchBolt bolt = getBatchBolt(id);
- try {
- bolt.execute(input);
- _collector.ack(input);
- } catch (FailedException e) {
- LOG.error("Failed to process tuple in batch", e);
- _collector.fail(input);
- }
- }
-
- @Override
- public void cleanup() {
- }
-
- @Override
- public void finishedId(Object id) {
- IBatchBolt bolt = getBatchBolt(id);
- _openTransactions.remove(id);
- bolt.finishBatch();
- }
-
- @Override
- public void timeoutId(Object attempt) {
- _openTransactions.remove(attempt);
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- newTransactionalBolt().declareOutputFields(declarer);
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return newTransactionalBolt().getComponentConfiguration();
- }
-
- private IBatchBolt getBatchBolt(Object id) {
- IBatchBolt bolt = _openTransactions.get(id);
- if (bolt == null) {
- bolt = newTransactionalBolt();
- bolt.prepare(_conf, _context, _collector, id);
- _openTransactions.put(id, bolt);
- }
- return bolt;
- }
-
- private IBatchBolt newTransactionalBolt() {
- return (IBatchBolt) Utils.deserialize(_boltSer);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollector.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollector.java b/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollector.java
deleted file mode 100644
index e087f5f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollector.java
+++ /dev/null
@@ -1,31 +0,0 @@
-package backtype.storm.coordination;
-
-import backtype.storm.utils.Utils;
-import java.util.List;
-
-public abstract class BatchOutputCollector {
-
- /**
- * Emits a tuple to the default output stream.
- */
- public List<Integer> emit(List<Object> tuple) {
- return emit(Utils.DEFAULT_STREAM_ID, tuple);
- }
-
- public abstract List<Integer> emit(String streamId, List<Object> tuple);
-
- /**
- * Emits a tuple to the specified task on the default output stream. This
- * output stream must have been declared as a direct stream, and the
- * specified task must use a direct grouping on this stream to receive the
- * message.
- */
- public void emitDirect(int taskId, List<Object> tuple) {
- emitDirect(taskId, Utils.DEFAULT_STREAM_ID, tuple);
- }
-
- public abstract void emitDirect(int taskId, String streamId,
- List<Object> tuple);
-
- public abstract void reportError(Throwable error);
-}
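The emitDirect javadoc above assumes a particular wiring: the stream must be declared as direct and the receiving component must subscribe with a direct grouping. A minimal sketch of that wiring, where the component ids, the stream id, and ConsumerBolt are made-up placeholders:

    // In the emitting bolt's declareOutputFields(): the boolean argument marks the stream as direct.
    declarer.declareStream("batch-results", true, new Fields("id", "result"));

    // In the topology definition: the consumer subscribes to that stream with a direct grouping.
    builder.setBolt("consumer", new ConsumerBolt())
           .directGrouping("batch-bolt", "batch-results");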
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java b/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java
deleted file mode 100644
index 73eff73..0000000
--- a/jstorm-client/src/main/java/backtype/storm/coordination/BatchOutputCollectorImpl.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package backtype.storm.coordination;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.tuple.Tuple;
-import java.util.List;
-
-public class BatchOutputCollectorImpl extends BatchOutputCollector {
- OutputCollector _collector;
-
- public BatchOutputCollectorImpl(OutputCollector collector) {
- _collector = collector;
- }
-
- @Override
- public List<Integer> emit(String streamId, List<Object> tuple) {
- return _collector.emit(streamId, tuple);
- }
-
- @Override
- public void emitDirect(int taskId, String streamId, List<Object> tuple) {
- _collector.emitDirect(taskId, streamId, tuple);
- }
-
- @Override
- public void reportError(Throwable error) {
- _collector.reportError(error);
- }
-
- public void ack(Tuple tup) {
- _collector.ack(tup);
- }
-
- public void fail(Tuple tup) {
- _collector.fail(tup);
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java b/jstorm-client/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java
deleted file mode 100644
index eff05b4..0000000
--- a/jstorm-client/src/main/java/backtype/storm/coordination/BatchSubtopologyBuilder.java
+++ /dev/null
@@ -1,469 +0,0 @@
-package backtype.storm.coordination;
-
-import backtype.storm.Constants;
-import backtype.storm.coordination.CoordinatedBolt.SourceArgs;
-import backtype.storm.generated.GlobalStreamId;
-import backtype.storm.generated.Grouping;
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.topology.BaseConfigurationDeclarer;
-import backtype.storm.topology.BasicBoltExecutor;
-import backtype.storm.topology.BoltDeclarer;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.InputDeclarer;
-import backtype.storm.topology.TopologyBuilder;
-import backtype.storm.tuple.Fields;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-
-public class BatchSubtopologyBuilder {
- Map<String, Component> _bolts = new HashMap<String, Component>();
- Component _masterBolt;
- String _masterId;
-
- public BatchSubtopologyBuilder(String masterBoltId, IBasicBolt masterBolt,
- Number boltParallelism) {
- Integer p = boltParallelism == null ? null : boltParallelism.intValue();
- _masterBolt = new Component(new BasicBoltExecutor(masterBolt), p);
- _masterId = masterBoltId;
- }
-
- public BatchSubtopologyBuilder(String masterBoltId, IBasicBolt masterBolt) {
- this(masterBoltId, masterBolt, null);
- }
-
- public BoltDeclarer getMasterDeclarer() {
- return new BoltDeclarerImpl(_masterBolt);
- }
-
- public BoltDeclarer setBolt(String id, IBatchBolt bolt) {
- return setBolt(id, bolt, null);
- }
-
- public BoltDeclarer setBolt(String id, IBatchBolt bolt, Number parallelism) {
- return setBolt(id, new BatchBoltExecutor(bolt), parallelism);
- }
-
- public BoltDeclarer setBolt(String id, IBasicBolt bolt) {
- return setBolt(id, bolt, null);
- }
-
- public BoltDeclarer setBolt(String id, IBasicBolt bolt, Number parallelism) {
- return setBolt(id, new BasicBoltExecutor(bolt), parallelism);
- }
-
- private BoltDeclarer setBolt(String id, IRichBolt bolt, Number parallelism) {
- Integer p = null;
- if (parallelism != null)
- p = parallelism.intValue();
- Component component = new Component(bolt, p);
- _bolts.put(id, component);
- return new BoltDeclarerImpl(component);
- }
-
- public void extendTopology(TopologyBuilder builder) {
- BoltDeclarer declarer = builder.setBolt(_masterId, new CoordinatedBolt(
- _masterBolt.bolt), _masterBolt.parallelism);
- for (InputDeclaration decl : _masterBolt.declarations) {
- decl.declare(declarer);
- }
- for (Map conf : _masterBolt.componentConfs) {
- declarer.addConfigurations(conf);
- }
- for (String id : _bolts.keySet()) {
- Component component = _bolts.get(id);
- Map<String, SourceArgs> coordinatedArgs = new HashMap<String, SourceArgs>();
- for (String c : componentBoltSubscriptions(component)) {
- SourceArgs source;
- if (c.equals(_masterId)) {
- source = SourceArgs.single();
- } else {
- source = SourceArgs.all();
- }
- coordinatedArgs.put(c, source);
- }
-
- BoltDeclarer input = builder.setBolt(id, new CoordinatedBolt(
- component.bolt, coordinatedArgs, null),
- component.parallelism);
- for (Map conf : component.componentConfs) {
- input.addConfigurations(conf);
- }
- for (String c : componentBoltSubscriptions(component)) {
- input.directGrouping(c, Constants.COORDINATED_STREAM_ID);
- }
- for (InputDeclaration d : component.declarations) {
- d.declare(input);
- }
- }
- }
-
- private Set<String> componentBoltSubscriptions(Component component) {
- Set<String> ret = new HashSet<String>();
- for (InputDeclaration d : component.declarations) {
- ret.add(d.getComponent());
- }
- return ret;
- }
-
- private static class Component {
- public IRichBolt bolt;
- public Integer parallelism;
- public List<InputDeclaration> declarations = new ArrayList<InputDeclaration>();
- public List<Map> componentConfs = new ArrayList<Map>();
-
- public Component(IRichBolt bolt, Integer parallelism) {
- this.bolt = bolt;
- this.parallelism = parallelism;
- }
- }
-
- private static interface InputDeclaration {
- void declare(InputDeclarer declarer);
-
- String getComponent();
- }
-
- private class BoltDeclarerImpl extends
- BaseConfigurationDeclarer<BoltDeclarer> implements BoltDeclarer {
- Component _component;
-
- public BoltDeclarerImpl(Component component) {
- _component = component;
- }
-
- @Override
- public BoltDeclarer fieldsGrouping(final String component,
- final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.fieldsGrouping(component, fields);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer fieldsGrouping(final String component,
- final String streamId, final Fields fields) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.fieldsGrouping(component, streamId, fields);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer globalGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.globalGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer globalGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.globalGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer shuffleGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.shuffleGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer shuffleGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.shuffleGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localOrShuffleGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localOrShuffleGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localOrShuffleGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(final String componentId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localFirstGrouping(componentId);
- }
-
- @Override
- public String getComponent() {
- return componentId;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer localFirstGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.localFirstGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer noneGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.noneGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer noneGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.noneGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer allGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.allGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer allGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.allGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer directGrouping(final String component) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.directGrouping(component);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer directGrouping(final String component,
- final String streamId) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.directGrouping(component, streamId);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(final String component,
- final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.customGrouping(component, grouping);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer customGrouping(final String component,
- final String streamId, final CustomStreamGrouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.customGrouping(component, streamId, grouping);
- }
-
- @Override
- public String getComponent() {
- return component;
- }
- });
- return this;
- }
-
- @Override
- public BoltDeclarer grouping(final GlobalStreamId stream,
- final Grouping grouping) {
- addDeclaration(new InputDeclaration() {
- @Override
- public void declare(InputDeclarer declarer) {
- declarer.grouping(stream, grouping);
- }
-
- @Override
- public String getComponent() {
- return stream.get_componentId();
- }
- });
- return this;
- }
-
- private void addDeclaration(InputDeclaration declaration) {
- _component.declarations.add(declaration);
- }
-
- @Override
- public BoltDeclarer addConfigurations(Map conf) {
- _component.componentConfs.add(conf);
- return this;
- }
-
-
- }
-}
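As a rough usage sketch of the builder removed above (the bolt ids and the MyMasterBolt/MyBatchBolt classes are placeholders, not part of this commit):

    BatchSubtopologyBuilder batch =
            new BatchSubtopologyBuilder("batch-master", new MyMasterBolt());
    batch.setBolt("partial-count", new MyBatchBolt(), 4)
         .shuffleGrouping("batch-master");

    // extendTopology() wraps each registered bolt in a CoordinatedBolt and wires the
    // coordination streams into an ordinary TopologyBuilder.
    TopologyBuilder builder = new TopologyBuilder();
    batch.extendTopology(builder);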
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/coordination/CoordinatedBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/coordination/CoordinatedBolt.java b/jstorm-client/src/main/java/backtype/storm/coordination/CoordinatedBolt.java
deleted file mode 100644
index 1e78932..0000000
--- a/jstorm-client/src/main/java/backtype/storm/coordination/CoordinatedBolt.java
+++ /dev/null
@@ -1,379 +0,0 @@
-package backtype.storm.coordination;
-
-import backtype.storm.topology.FailedException;
-import java.util.Map.Entry;
-import backtype.storm.tuple.Values;
-import backtype.storm.generated.GlobalStreamId;
-import java.util.Collection;
-import backtype.storm.Constants;
-import backtype.storm.generated.Grouping;
-import backtype.storm.task.IOutputCollector;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.TimeCacheMap;
-import backtype.storm.utils.Utils;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import static backtype.storm.utils.Utils.get;
-
-/**
- * Coordination requires the request ids to be globally unique for a while, so
- * that retries are not confused with new requests.
- */
-public class CoordinatedBolt implements IRichBolt {
- public static Logger LOG = LoggerFactory.getLogger(CoordinatedBolt.class);
-
- public static interface FinishedCallback {
- void finishedId(Object id);
- }
-
- public static interface TimeoutCallback {
- void timeoutId(Object id);
- }
-
- public static class SourceArgs implements Serializable {
- public boolean singleCount;
-
- protected SourceArgs(boolean singleCount) {
- this.singleCount = singleCount;
- }
-
- public static SourceArgs single() {
- return new SourceArgs(true);
- }
-
- public static SourceArgs all() {
- return new SourceArgs(false);
- }
-
- @Override
- public String toString() {
- return "<Single: " + singleCount + ">";
- }
- }
-
- public class CoordinatedOutputCollector implements IOutputCollector {
- IOutputCollector _delegate;
-
- public CoordinatedOutputCollector(IOutputCollector delegate) {
- _delegate = delegate;
- }
-
- public List<Integer> emit(String stream, Collection<Tuple> anchors,
- List<Object> tuple) {
- List<Integer> tasks = _delegate.emit(stream, anchors, tuple);
- updateTaskCounts(tuple.get(0), tasks);
- return tasks;
- }
-
- public void emitDirect(int task, String stream,
- Collection<Tuple> anchors, List<Object> tuple) {
- updateTaskCounts(tuple.get(0), Arrays.asList(task));
- _delegate.emitDirect(task, stream, anchors, tuple);
- }
-
- public void ack(Tuple tuple) {
- Object id = tuple.getValue(0);
- synchronized (_tracked) {
- TrackingInfo track = _tracked.get(id);
- if (track != null)
- track.receivedTuples++;
- }
- boolean failed = checkFinishId(tuple, TupleType.REGULAR);
- if (failed) {
- _delegate.fail(tuple);
- } else {
- _delegate.ack(tuple);
- }
- }
-
- public void fail(Tuple tuple) {
- Object id = tuple.getValue(0);
- synchronized (_tracked) {
- TrackingInfo track = _tracked.get(id);
- if (track != null)
- track.failed = true;
- }
- checkFinishId(tuple, TupleType.REGULAR);
- _delegate.fail(tuple);
- }
-
- public void reportError(Throwable error) {
- _delegate.reportError(error);
- }
-
- private void updateTaskCounts(Object id, List<Integer> tasks) {
- synchronized (_tracked) {
- TrackingInfo track = _tracked.get(id);
- if (track != null) {
- Map<Integer, Integer> taskEmittedTuples = track.taskEmittedTuples;
- for (Integer task : tasks) {
- int newCount = get(taskEmittedTuples, task, 0) + 1;
- taskEmittedTuples.put(task, newCount);
- }
- }
- }
- }
- }
-
- private Map<String, SourceArgs> _sourceArgs;
- private IdStreamSpec _idStreamSpec;
- private IRichBolt _delegate;
- private Integer _numSourceReports;
- private List<Integer> _countOutTasks = new ArrayList<Integer>();
- private OutputCollector _collector;
- private TimeCacheMap<Object, TrackingInfo> _tracked;
-
- public static class TrackingInfo {
- int reportCount = 0;
- int expectedTupleCount = 0;
- int receivedTuples = 0;
- boolean failed = false;
- Map<Integer, Integer> taskEmittedTuples = new HashMap<Integer, Integer>();
- boolean receivedId = false;
- boolean finished = false;
- List<Tuple> ackTuples = new ArrayList<Tuple>();
-
- @Override
- public String toString() {
- return "reportCount: " + reportCount + "\n"
- + "expectedTupleCount: " + expectedTupleCount + "\n"
- + "receivedTuples: " + receivedTuples + "\n" + "failed: "
- + failed + "\n" + taskEmittedTuples.toString();
- }
- }
-
- public static class IdStreamSpec implements Serializable {
- GlobalStreamId _id;
-
- public GlobalStreamId getGlobalStreamId() {
- return _id;
- }
-
- public static IdStreamSpec makeDetectSpec(String component,
- String stream) {
- return new IdStreamSpec(component, stream);
- }
-
- protected IdStreamSpec(String component, String stream) {
- _id = new GlobalStreamId(component, stream);
- }
- }
-
- public CoordinatedBolt(IRichBolt delegate) {
- this(delegate, null, null);
- }
-
- public CoordinatedBolt(IRichBolt delegate, String sourceComponent,
- SourceArgs sourceArgs, IdStreamSpec idStreamSpec) {
- this(delegate, singleSourceArgs(sourceComponent, sourceArgs),
- idStreamSpec);
- }
-
- public CoordinatedBolt(IRichBolt delegate,
- Map<String, SourceArgs> sourceArgs, IdStreamSpec idStreamSpec) {
- _sourceArgs = sourceArgs;
- if (_sourceArgs == null)
- _sourceArgs = new HashMap<String, SourceArgs>();
- _delegate = delegate;
- _idStreamSpec = idStreamSpec;
- }
-
- public void prepare(Map config, TopologyContext context,
- OutputCollector collector) {
- TimeCacheMap.ExpiredCallback<Object, TrackingInfo> callback = null;
- if (_delegate instanceof TimeoutCallback) {
- callback = new TimeoutItems();
- }
- _tracked = new TimeCacheMap<Object, TrackingInfo>(
- context.maxTopologyMessageTimeout(), callback);
- _collector = collector;
- _delegate.prepare(config, context, new OutputCollector(
- new CoordinatedOutputCollector(collector)));
- for (String component : Utils.get(context.getThisTargets(),
- Constants.COORDINATED_STREAM_ID,
- new HashMap<String, Grouping>()).keySet()) {
- for (Integer task : context.getComponentTasks(component)) {
- _countOutTasks.add(task);
- }
- }
- if (!_sourceArgs.isEmpty()) {
- _numSourceReports = 0;
- for (Entry<String, SourceArgs> entry : _sourceArgs.entrySet()) {
- if (entry.getValue().singleCount) {
- _numSourceReports += 1;
- } else {
- _numSourceReports += context.getComponentTasks(
- entry.getKey()).size();
- }
- }
- }
- }
-
- private boolean checkFinishId(Tuple tup, TupleType type) {
- Object id = tup.getValue(0);
- boolean failed = false;
-
- synchronized (_tracked) {
- TrackingInfo track = _tracked.get(id);
- try {
- if (track != null) {
- boolean delayed = false;
- if (_idStreamSpec == null && type == TupleType.COORD
- || _idStreamSpec != null && type == TupleType.ID) {
- track.ackTuples.add(tup);
- delayed = true;
- }
- if (track.failed) {
- failed = true;
- for (Tuple t : track.ackTuples) {
- _collector.fail(t);
- }
- _tracked.remove(id);
- } else if (track.receivedId
- && (_sourceArgs.isEmpty() || track.reportCount == _numSourceReports
- && track.expectedTupleCount == track.receivedTuples)) {
- if (_delegate instanceof FinishedCallback) {
- ((FinishedCallback) _delegate).finishedId(id);
- }
- if (!(_sourceArgs.isEmpty() || type != TupleType.REGULAR)) {
- throw new IllegalStateException(
- "Coordination condition met on a non-coordinating tuple. Should be impossible");
- }
- Iterator<Integer> outTasks = _countOutTasks.iterator();
- while (outTasks.hasNext()) {
- int task = outTasks.next();
- int numTuples = get(track.taskEmittedTuples, task,
- 0);
- _collector.emitDirect(task,
- Constants.COORDINATED_STREAM_ID, tup,
- new Values(id, numTuples));
- }
- for (Tuple t : track.ackTuples) {
- _collector.ack(t);
- }
- track.finished = true;
- _tracked.remove(id);
- }
- if (!delayed && type != TupleType.REGULAR) {
- if (track.failed) {
- _collector.fail(tup);
- } else {
- _collector.ack(tup);
- }
- }
- } else {
- if (type != TupleType.REGULAR)
- _collector.fail(tup);
- }
- } catch (FailedException e) {
- LOG.error("Failed to finish batch", e);
- for (Tuple t : track.ackTuples) {
- _collector.fail(t);
- }
- _tracked.remove(id);
- failed = true;
- }
- }
- return failed;
- }
-
- public void execute(Tuple tuple) {
- Object id = tuple.getValue(0);
- TrackingInfo track;
- TupleType type = getTupleType(tuple);
- synchronized (_tracked) {
- track = _tracked.get(id);
- if (track == null) {
- track = new TrackingInfo();
- if (_idStreamSpec == null)
- track.receivedId = true;
- _tracked.put(id, track);
- }
- }
-
- if (type == TupleType.ID) {
- synchronized (_tracked) {
- track.receivedId = true;
- }
- checkFinishId(tuple, type);
- } else if (type == TupleType.COORD) {
- int count = (Integer) tuple.getValue(1);
- synchronized (_tracked) {
- track.reportCount++;
- track.expectedTupleCount += count;
- }
- checkFinishId(tuple, type);
- } else {
- synchronized (_tracked) {
- _delegate.execute(tuple);
- }
- }
- }
-
- public void cleanup() {
- _delegate.cleanup();
- _tracked.cleanup();
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _delegate.declareOutputFields(declarer);
- declarer.declareStream(Constants.COORDINATED_STREAM_ID, true,
- new Fields("id", "count"));
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return _delegate.getComponentConfiguration();
- }
-
- private static Map<String, SourceArgs> singleSourceArgs(
- String sourceComponent, SourceArgs sourceArgs) {
- Map<String, SourceArgs> ret = new HashMap<String, SourceArgs>();
- ret.put(sourceComponent, sourceArgs);
- return ret;
- }
-
- private class TimeoutItems implements
- TimeCacheMap.ExpiredCallback<Object, TrackingInfo> {
- @Override
- public void expire(Object id, TrackingInfo val) {
- synchronized (_tracked) {
- // the combination of the lock and the finished flag ensure that
- // an id is never timed out if it has been finished
- val.failed = true;
- if (!val.finished) {
- ((TimeoutCallback) _delegate).timeoutId(id);
- }
- }
- }
- }
-
- private TupleType getTupleType(Tuple tuple) {
- if (_idStreamSpec != null
- && tuple.getSourceGlobalStreamid().equals(_idStreamSpec._id)) {
- return TupleType.ID;
- } else if (!_sourceArgs.isEmpty()
- && tuple.getSourceStreamId().equals(
- Constants.COORDINATED_STREAM_ID)) {
- return TupleType.COORD;
- } else {
- return TupleType.REGULAR;
- }
- }
-
- static enum TupleType {
- REGULAR, ID, COORD
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/coordination/IBatchBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/coordination/IBatchBolt.java b/jstorm-client/src/main/java/backtype/storm/coordination/IBatchBolt.java
deleted file mode 100644
index f8a9386..0000000
--- a/jstorm-client/src/main/java/backtype/storm/coordination/IBatchBolt.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package backtype.storm.coordination;
-
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.IComponent;
-import backtype.storm.tuple.Tuple;
-import java.io.Serializable;
-import java.util.Map;
-
-public interface IBatchBolt<T> extends Serializable, IComponent {
- void prepare(Map conf, TopologyContext context,
- BatchOutputCollector collector, T id);
-
- void execute(Tuple tuple);
-
- void finishBatch();
-}
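A minimal illustrative implementation of the interface above, assuming a simple per-batch count; the class name and output fields are made up for the example:

    import java.util.Map;
    import backtype.storm.coordination.BatchOutputCollector;
    import backtype.storm.coordination.IBatchBolt;
    import backtype.storm.task.TopologyContext;
    import backtype.storm.topology.OutputFieldsDeclarer;
    import backtype.storm.tuple.Fields;
    import backtype.storm.tuple.Tuple;
    import backtype.storm.tuple.Values;

    public class CountingBatchBolt implements IBatchBolt<Object> {
        private BatchOutputCollector collector;
        private Object batchId;
        private int count = 0;

        @Override
        public void prepare(Map conf, TopologyContext context,
                BatchOutputCollector collector, Object id) {
            this.collector = collector;
            this.batchId = id;
        }

        @Override
        public void execute(Tuple tuple) {
            count++;                                        // accumulate per-batch state
        }

        @Override
        public void finishBatch() {
            collector.emit(new Values(batchId, count));     // emit once when the batch completes
        }

        @Override
        public void declareOutputFields(OutputFieldsDeclarer declarer) {
            declarer.declare(new Fields("id", "count"));
        }

        @Override
        public Map<String, Object> getComponentConfiguration() {
            return null;
        }
    }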
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/daemon/Shutdownable.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/daemon/Shutdownable.java b/jstorm-client/src/main/java/backtype/storm/daemon/Shutdownable.java
deleted file mode 100644
index 1e61512..0000000
--- a/jstorm-client/src/main/java/backtype/storm/daemon/Shutdownable.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package backtype.storm.daemon;
-
-public interface Shutdownable {
- public void shutdown();
-}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java b/jstorm-client/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java
deleted file mode 100644
index 756b8aa..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/DRPCInvocationsClient.java
+++ /dev/null
@@ -1,78 +0,0 @@
-package backtype.storm.drpc;
-
-import org.apache.thrift7.TException;
-import org.apache.thrift7.protocol.TBinaryProtocol;
-import org.apache.thrift7.transport.TFramedTransport;
-import org.apache.thrift7.transport.TSocket;
-import org.apache.thrift7.transport.TTransport;
-
-import backtype.storm.generated.DRPCRequest;
-import backtype.storm.generated.DistributedRPCInvocations;
-
-public class DRPCInvocationsClient implements DistributedRPCInvocations.Iface {
- private TTransport conn;
- private DistributedRPCInvocations.Client client;
- private String host;
- private int port;
-
- public DRPCInvocationsClient(String host, int port) {
- try {
- this.host = host;
- this.port = port;
- connect();
- } catch (TException e) {
- throw new RuntimeException(e);
- }
- }
-
- private void connect() throws TException {
- conn = new TFramedTransport(new TSocket(host, port));
- client = new DistributedRPCInvocations.Client(new TBinaryProtocol(conn));
- conn.open();
- }
-
- public String getHost() {
- return host;
- }
-
- public int getPort() {
- return port;
- }
-
- public void result(String id, String result) throws TException {
- try {
- if (client == null)
- connect();
- client.result(id, result);
- } catch (TException e) {
- client = null;
- throw e;
- }
- }
-
- public DRPCRequest fetchRequest(String func) throws TException {
- try {
- if (client == null)
- connect();
- return client.fetchRequest(func);
- } catch (TException e) {
- client = null;
- throw e;
- }
- }
-
- public void failRequest(String id) throws TException {
- try {
- if (client == null)
- connect();
- client.failRequest(id);
- } catch (TException e) {
- client = null;
- throw e;
- }
- }
-
- public void close() {
- conn.close();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/DRPCSpout.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/DRPCSpout.java b/jstorm-client/src/main/java/backtype/storm/drpc/DRPCSpout.java
deleted file mode 100644
index a68b008..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/DRPCSpout.java
+++ /dev/null
@@ -1,162 +0,0 @@
-package backtype.storm.drpc;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.thrift7.TException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import backtype.storm.Config;
-import backtype.storm.ILocalDRPC;
-import backtype.storm.generated.DRPCRequest;
-import backtype.storm.generated.DistributedRPCInvocations;
-import backtype.storm.spout.SpoutOutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichSpout;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Values;
-import backtype.storm.utils.ServiceRegistry;
-import backtype.storm.utils.Utils;
-
-public class DRPCSpout extends BaseRichSpout {
- public static Logger LOG = LoggerFactory.getLogger(DRPCSpout.class);
-
- SpoutOutputCollector _collector;
- List<DRPCInvocationsClient> _clients = new ArrayList<DRPCInvocationsClient>();
- String _function;
- String _local_drpc_id = null;
-
- private static class DRPCMessageId {
- String id;
- int index;
-
- public DRPCMessageId(String id, int index) {
- this.id = id;
- this.index = index;
- }
- }
-
- public DRPCSpout(String function) {
- _function = function;
- }
-
- public DRPCSpout(String function, ILocalDRPC drpc) {
- _function = function;
- _local_drpc_id = drpc.getServiceId();
- }
-
- @Override
- public void open(Map conf, TopologyContext context,
- SpoutOutputCollector collector) {
- _collector = collector;
- if (_local_drpc_id == null) {
- int numTasks = context.getComponentTasks(
- context.getThisComponentId()).size();
- int index = context.getThisTaskIndex();
-
- int port = Utils.getInt(conf.get(Config.DRPC_INVOCATIONS_PORT));
- List<String> servers = (List<String>) conf.get(Config.DRPC_SERVERS);
- if (servers == null || servers.isEmpty()) {
- throw new RuntimeException(
- "No DRPC servers configured for topology");
- }
- if (numTasks < servers.size()) {
- for (String s : servers) {
- _clients.add(new DRPCInvocationsClient(s, port));
- }
- } else {
- int i = index % servers.size();
- _clients.add(new DRPCInvocationsClient(servers.get(i), port));
- }
- }
-
- }
-
- @Override
- public void close() {
- for (DRPCInvocationsClient client : _clients) {
- client.close();
- }
- }
-
- @Override
- public void nextTuple() {
- boolean gotRequest = false;
- if (_local_drpc_id == null) {
- for (int i = 0; i < _clients.size(); i++) {
- DRPCInvocationsClient client = _clients.get(i);
- try {
- DRPCRequest req = client.fetchRequest(_function);
- if (req.get_request_id().length() > 0) {
- Map returnInfo = new HashMap();
- returnInfo.put("id", req.get_request_id());
- returnInfo.put("host", client.getHost());
- returnInfo.put("port", client.getPort());
- gotRequest = true;
- _collector.emit(new Values(req.get_func_args(),
- Utils.to_json(returnInfo)),
- new DRPCMessageId(req.get_request_id(), i));
- break;
- }
- } catch (TException e) {
- LOG.error("Failed to fetch DRPC result from DRPC server", e);
- }
- }
- } else {
- DistributedRPCInvocations.Iface drpc = (DistributedRPCInvocations.Iface) ServiceRegistry
- .getService(_local_drpc_id);
- if (drpc != null) { // can happen during shutdown of drpc while
- // topology is still up
- try {
- DRPCRequest req = drpc.fetchRequest(_function);
- if (req.get_request_id().length() > 0) {
- Map returnInfo = new HashMap();
- returnInfo.put("id", req.get_request_id());
- returnInfo.put("host", _local_drpc_id);
- returnInfo.put("port", 0);
- gotRequest = true;
- _collector.emit(new Values(req.get_func_args(),
- Utils.to_json(returnInfo)),
- new DRPCMessageId(req.get_request_id(), 0));
- }
- } catch (TException e) {
- throw new RuntimeException(e);
- }
- }
- }
- if (!gotRequest) {
- Utils.sleep(1);
- }
- }
-
- @Override
- public void ack(Object msgId) {
- }
-
- @Override
- public void fail(Object msgId) {
- DRPCMessageId did = (DRPCMessageId) msgId;
- DistributedRPCInvocations.Iface client;
-
- if (_local_drpc_id == null) {
- client = _clients.get(did.index);
- } else {
- client = (DistributedRPCInvocations.Iface) ServiceRegistry
- .getService(_local_drpc_id);
- }
- try {
- client.failRequest(did.id);
- } catch (TException e) {
- LOG.error("Failed to fail request", e);
- }
- }
-
- @Override
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("args", "return-info"));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/JoinResult.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/JoinResult.java b/jstorm-client/src/main/java/backtype/storm/drpc/JoinResult.java
deleted file mode 100644
index b6733a3..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/JoinResult.java
+++ /dev/null
@@ -1,59 +0,0 @@
-package backtype.storm.drpc;
-
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.topology.base.BaseRichBolt;
-import backtype.storm.tuple.Fields;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.tuple.Values;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class JoinResult extends BaseRichBolt {
- public static Logger LOG = LoggerFactory.getLogger(JoinResult.class);
-
- String returnComponent;
- Map<Object, Tuple> returns = new HashMap<Object, Tuple>();
- Map<Object, Tuple> results = new HashMap<Object, Tuple>();
- OutputCollector _collector;
-
- public JoinResult(String returnComponent) {
- this.returnComponent = returnComponent;
- }
-
- public void prepare(Map map, TopologyContext context,
- OutputCollector collector) {
- _collector = collector;
- }
-
- public void execute(Tuple tuple) {
- Object requestId = tuple.getValue(0);
- if (tuple.getSourceComponent().equals(returnComponent)) {
- returns.put(requestId, tuple);
- } else {
- results.put(requestId, tuple);
- }
-
- if (returns.containsKey(requestId) && results.containsKey(requestId)) {
- Tuple result = results.remove(requestId);
- Tuple returner = returns.remove(requestId);
- LOG.debug(result.getValue(1).toString());
- List<Tuple> anchors = new ArrayList<Tuple>();
- anchors.add(result);
- anchors.add(returner);
- _collector.emit(anchors, new Values("" + result.getValue(1),
- returner.getValue(1)));
- _collector.ack(result);
- _collector.ack(returner);
- }
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- declarer.declare(new Fields("result", "return-info"));
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/KeyedFairBolt.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/KeyedFairBolt.java b/jstorm-client/src/main/java/backtype/storm/drpc/KeyedFairBolt.java
deleted file mode 100644
index 287168f..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/KeyedFairBolt.java
+++ /dev/null
@@ -1,75 +0,0 @@
-package backtype.storm.drpc;
-
-import backtype.storm.coordination.CoordinatedBolt.FinishedCallback;
-import backtype.storm.task.OutputCollector;
-import backtype.storm.task.TopologyContext;
-import backtype.storm.topology.BasicBoltExecutor;
-import backtype.storm.topology.IBasicBolt;
-import backtype.storm.topology.IRichBolt;
-import backtype.storm.topology.OutputFieldsDeclarer;
-import backtype.storm.tuple.Tuple;
-import backtype.storm.utils.KeyedRoundRobinQueue;
-import java.util.HashMap;
-import java.util.Map;
-
-public class KeyedFairBolt implements IRichBolt, FinishedCallback {
- IRichBolt _delegate;
- KeyedRoundRobinQueue<Tuple> _rrQueue;
- Thread _executor;
- FinishedCallback _callback;
-
- public KeyedFairBolt(IRichBolt delegate) {
- _delegate = delegate;
- }
-
- public KeyedFairBolt(IBasicBolt delegate) {
- this(new BasicBoltExecutor(delegate));
- }
-
- public void prepare(Map stormConf, TopologyContext context,
- OutputCollector collector) {
- if (_delegate instanceof FinishedCallback) {
- _callback = (FinishedCallback) _delegate;
- }
- _delegate.prepare(stormConf, context, collector);
- _rrQueue = new KeyedRoundRobinQueue<Tuple>();
- _executor = new Thread(new Runnable() {
- public void run() {
- try {
- while (true) {
- _delegate.execute(_rrQueue.take());
- }
- } catch (InterruptedException e) {
-
- }
- }
- });
- _executor.setDaemon(true);
- _executor.start();
- }
-
- public void execute(Tuple input) {
- Object key = input.getValue(0);
- _rrQueue.add(key, input);
- }
-
- public void cleanup() {
- _executor.interrupt();
- _delegate.cleanup();
- }
-
- public void declareOutputFields(OutputFieldsDeclarer declarer) {
- _delegate.declareOutputFields(declarer);
- }
-
- public void finishedId(Object id) {
- if (_callback != null) {
- _callback.finishedId(id);
- }
- }
-
- @Override
- public Map<String, Object> getComponentConfiguration() {
- return new HashMap<String, Object>();
- }
-}
http://git-wip-us.apache.org/repos/asf/storm/blob/e935da91/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java
----------------------------------------------------------------------
diff --git a/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java b/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java
deleted file mode 100644
index 5277cff..0000000
--- a/jstorm-client/src/main/java/backtype/storm/drpc/LinearDRPCInputDeclarer.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package backtype.storm.drpc;
-
-import backtype.storm.grouping.CustomStreamGrouping;
-import backtype.storm.topology.ComponentConfigurationDeclarer;
-import backtype.storm.tuple.Fields;
-
-public interface LinearDRPCInputDeclarer extends
- ComponentConfigurationDeclarer<LinearDRPCInputDeclarer> {
- public LinearDRPCInputDeclarer fieldsGrouping(Fields fields);
-
- public LinearDRPCInputDeclarer fieldsGrouping(String streamId, Fields fields);
-
- public LinearDRPCInputDeclarer globalGrouping();
-
- public LinearDRPCInputDeclarer globalGrouping(String streamId);
-
- public LinearDRPCInputDeclarer shuffleGrouping();
-
- public LinearDRPCInputDeclarer shuffleGrouping(String streamId);
-
- public LinearDRPCInputDeclarer localOrShuffleGrouping();
-
- public LinearDRPCInputDeclarer localOrShuffleGrouping(String streamId);
-
- public LinearDRPCInputDeclarer noneGrouping();
-
- public LinearDRPCInputDeclarer noneGrouping(String streamId);
-
- public LinearDRPCInputDeclarer allGrouping();
-
- public LinearDRPCInputDeclarer allGrouping(String streamId);
-
- public LinearDRPCInputDeclarer directGrouping();
-
- public LinearDRPCInputDeclarer directGrouping(String streamId);
-
- public LinearDRPCInputDeclarer customGrouping(CustomStreamGrouping grouping);
-
- public LinearDRPCInputDeclarer customGrouping(String streamId,
- CustomStreamGrouping grouping);
-
-}