You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@storm.apache.org by bo...@apache.org on 2015/12/04 16:04:00 UTC
[05/17] storm git commit: Blobstore API STORM- 876
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java b/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
index 5ef4b5b..875c2ba 100644
--- a/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/NodeInfo.java
@@ -461,13 +461,13 @@ public class NodeInfo implements org.apache.thrift.TBase<NodeInfo, NodeInfo._Fie
case 2: // PORT
if (schemeField.type == org.apache.thrift.protocol.TType.SET) {
{
- org.apache.thrift.protocol.TSet _set524 = iprot.readSetBegin();
- struct.port = new HashSet<Long>(2*_set524.size);
- long _elem525;
- for (int _i526 = 0; _i526 < _set524.size; ++_i526)
+ org.apache.thrift.protocol.TSet _set540 = iprot.readSetBegin();
+ struct.port = new HashSet<Long>(2*_set540.size);
+ long _elem541;
+ for (int _i542 = 0; _i542 < _set540.size; ++_i542)
{
- _elem525 = iprot.readI64();
- struct.port.add(_elem525);
+ _elem541 = iprot.readI64();
+ struct.port.add(_elem541);
}
iprot.readSetEnd();
}
@@ -498,9 +498,9 @@ public class NodeInfo implements org.apache.thrift.TBase<NodeInfo, NodeInfo._Fie
oprot.writeFieldBegin(PORT_FIELD_DESC);
{
oprot.writeSetBegin(new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.I64, struct.port.size()));
- for (long _iter527 : struct.port)
+ for (long _iter543 : struct.port)
{
- oprot.writeI64(_iter527);
+ oprot.writeI64(_iter543);
}
oprot.writeSetEnd();
}
@@ -526,9 +526,9 @@ public class NodeInfo implements org.apache.thrift.TBase<NodeInfo, NodeInfo._Fie
oprot.writeString(struct.node);
{
oprot.writeI32(struct.port.size());
- for (long _iter528 : struct.port)
+ for (long _iter544 : struct.port)
{
- oprot.writeI64(_iter528);
+ oprot.writeI64(_iter544);
}
}
}
@@ -539,13 +539,13 @@ public class NodeInfo implements org.apache.thrift.TBase<NodeInfo, NodeInfo._Fie
struct.node = iprot.readString();
struct.set_node_isSet(true);
{
- org.apache.thrift.protocol.TSet _set529 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.I64, iprot.readI32());
- struct.port = new HashSet<Long>(2*_set529.size);
- long _elem530;
- for (int _i531 = 0; _i531 < _set529.size; ++_i531)
+ org.apache.thrift.protocol.TSet _set545 = new org.apache.thrift.protocol.TSet(org.apache.thrift.protocol.TType.I64, iprot.readI32());
+ struct.port = new HashSet<Long>(2*_set545.size);
+ long _elem546;
+ for (int _i547 = 0; _i547 < _set545.size; ++_i547)
{
- _elem530 = iprot.readI64();
- struct.port.add(_elem530);
+ _elem546 = iprot.readI64();
+ struct.port.add(_elem546);
}
}
struct.set_port_isSet(true);
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/generated/ReadableBlobMeta.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/ReadableBlobMeta.java b/storm-core/src/jvm/backtype/storm/generated/ReadableBlobMeta.java
new file mode 100644
index 0000000..69355a5
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/ReadableBlobMeta.java
@@ -0,0 +1,510 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+public class ReadableBlobMeta implements org.apache.thrift.TBase<ReadableBlobMeta, ReadableBlobMeta._Fields>, java.io.Serializable, Cloneable, Comparable<ReadableBlobMeta> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ReadableBlobMeta");
+
+ private static final org.apache.thrift.protocol.TField SETTABLE_FIELD_DESC = new org.apache.thrift.protocol.TField("settable", org.apache.thrift.protocol.TType.STRUCT, (short)1);
+ private static final org.apache.thrift.protocol.TField VERSION_FIELD_DESC = new org.apache.thrift.protocol.TField("version", org.apache.thrift.protocol.TType.I64, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new ReadableBlobMetaStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new ReadableBlobMetaTupleSchemeFactory());
+ }
+
+ private SettableBlobMeta settable; // required
+ private long version; // required
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ SETTABLE((short)1, "settable"),
+ VERSION((short)2, "version");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // SETTABLE
+ return SETTABLE;
+ case 2: // VERSION
+ return VERSION;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __VERSION_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.SETTABLE, new org.apache.thrift.meta_data.FieldMetaData("settable", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, SettableBlobMeta.class)));
+ tmpMap.put(_Fields.VERSION, new org.apache.thrift.meta_data.FieldMetaData("version", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(ReadableBlobMeta.class, metaDataMap);
+ }
+
+ public ReadableBlobMeta() {
+ }
+
+ public ReadableBlobMeta(
+ SettableBlobMeta settable,
+ long version)
+ {
+ this();
+ this.settable = settable;
+ this.version = version;
+ set_version_isSet(true);
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public ReadableBlobMeta(ReadableBlobMeta other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.is_set_settable()) {
+ this.settable = new SettableBlobMeta(other.settable);
+ }
+ this.version = other.version;
+ }
+
+ public ReadableBlobMeta deepCopy() {
+ return new ReadableBlobMeta(this);
+ }
+
+ @Override
+ public void clear() {
+ this.settable = null;
+ set_version_isSet(false);
+ this.version = 0;
+ }
+
+ public SettableBlobMeta get_settable() {
+ return this.settable;
+ }
+
+ public void set_settable(SettableBlobMeta settable) {
+ this.settable = settable;
+ }
+
+ public void unset_settable() {
+ this.settable = null;
+ }
+
+ /** Returns true if field settable is set (has been assigned a value) and false otherwise */
+ public boolean is_set_settable() {
+ return this.settable != null;
+ }
+
+ public void set_settable_isSet(boolean value) {
+ if (!value) {
+ this.settable = null;
+ }
+ }
+
+ public long get_version() {
+ return this.version;
+ }
+
+ public void set_version(long version) {
+ this.version = version;
+ set_version_isSet(true);
+ }
+
+ public void unset_version() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __VERSION_ISSET_ID);
+ }
+
+ /** Returns true if field version is set (has been assigned a value) and false otherwise */
+ public boolean is_set_version() {
+ return EncodingUtils.testBit(__isset_bitfield, __VERSION_ISSET_ID);
+ }
+
+ public void set_version_isSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __VERSION_ISSET_ID, value);
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case SETTABLE:
+ if (value == null) {
+ unset_settable();
+ } else {
+ set_settable((SettableBlobMeta)value);
+ }
+ break;
+
+ case VERSION:
+ if (value == null) {
+ unset_version();
+ } else {
+ set_version((Long)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case SETTABLE:
+ return get_settable();
+
+ case VERSION:
+ return get_version();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case SETTABLE:
+ return is_set_settable();
+ case VERSION:
+ return is_set_version();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof ReadableBlobMeta)
+ return this.equals((ReadableBlobMeta)that);
+ return false;
+ }
+
+ public boolean equals(ReadableBlobMeta that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_settable = true && this.is_set_settable();
+ boolean that_present_settable = true && that.is_set_settable();
+ if (this_present_settable || that_present_settable) {
+ if (!(this_present_settable && that_present_settable))
+ return false;
+ if (!this.settable.equals(that.settable))
+ return false;
+ }
+
+ boolean this_present_version = true;
+ boolean that_present_version = true;
+ if (this_present_version || that_present_version) {
+ if (!(this_present_version && that_present_version))
+ return false;
+ if (this.version != that.version)
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_settable = true && (is_set_settable());
+ list.add(present_settable);
+ if (present_settable)
+ list.add(settable);
+
+ boolean present_version = true;
+ list.add(present_version);
+ if (present_version)
+ list.add(version);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(ReadableBlobMeta other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_settable()).compareTo(other.is_set_settable());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_settable()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.settable, other.settable);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_version()).compareTo(other.is_set_version());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_version()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.version, other.version);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ReadableBlobMeta(");
+ boolean first = true;
+
+ sb.append("settable:");
+ if (this.settable == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.settable);
+ }
+ first = false;
+ if (!first) sb.append(", ");
+ sb.append("version:");
+ sb.append(this.version);
+ first = false;
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_settable()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'settable' is unset! Struct:" + toString());
+ }
+
+ if (!is_set_version()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'version' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ if (settable != null) {
+ settable.validate();
+ }
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class ReadableBlobMetaStandardSchemeFactory implements SchemeFactory {
+ public ReadableBlobMetaStandardScheme getScheme() {
+ return new ReadableBlobMetaStandardScheme();
+ }
+ }
+
+ private static class ReadableBlobMetaStandardScheme extends StandardScheme<ReadableBlobMeta> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // SETTABLE
+ if (schemeField.type == org.apache.thrift.protocol.TType.STRUCT) {
+ struct.settable = new SettableBlobMeta();
+ struct.settable.read(iprot);
+ struct.set_settable_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // VERSION
+ if (schemeField.type == org.apache.thrift.protocol.TType.I64) {
+ struct.version = iprot.readI64();
+ struct.set_version_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.settable != null) {
+ oprot.writeFieldBegin(SETTABLE_FIELD_DESC);
+ struct.settable.write(oprot);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldBegin(VERSION_FIELD_DESC);
+ oprot.writeI64(struct.version);
+ oprot.writeFieldEnd();
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class ReadableBlobMetaTupleSchemeFactory implements SchemeFactory {
+ public ReadableBlobMetaTupleScheme getScheme() {
+ return new ReadableBlobMetaTupleScheme();
+ }
+ }
+
+ private static class ReadableBlobMetaTupleScheme extends TupleScheme<ReadableBlobMeta> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ struct.settable.write(oprot);
+ oprot.writeI64(struct.version);
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, ReadableBlobMeta struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ struct.settable = new SettableBlobMeta();
+ struct.settable.read(iprot);
+ struct.set_settable_isSet(true);
+ struct.version = iprot.readI64();
+ struct.set_version_isSet(true);
+ }
+ }
+
+}
+
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/generated/SettableBlobMeta.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SettableBlobMeta.java b/storm-core/src/jvm/backtype/storm/generated/SettableBlobMeta.java
new file mode 100644
index 0000000..b3dd9d8
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/generated/SettableBlobMeta.java
@@ -0,0 +1,567 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+ * Autogenerated by Thrift Compiler (0.9.3)
+ *
+ * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+ * @generated
+ */
+package backtype.storm.generated;
+
+import org.apache.thrift.scheme.IScheme;
+import org.apache.thrift.scheme.SchemeFactory;
+import org.apache.thrift.scheme.StandardScheme;
+
+import org.apache.thrift.scheme.TupleScheme;
+import org.apache.thrift.protocol.TTupleProtocol;
+import org.apache.thrift.protocol.TProtocolException;
+import org.apache.thrift.EncodingUtils;
+import org.apache.thrift.TException;
+import org.apache.thrift.async.AsyncMethodCallback;
+import org.apache.thrift.server.AbstractNonblockingServer.*;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Map;
+import java.util.HashMap;
+import java.util.EnumMap;
+import java.util.Set;
+import java.util.HashSet;
+import java.util.EnumSet;
+import java.util.Collections;
+import java.util.BitSet;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import javax.annotation.Generated;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked"})
+@Generated(value = "Autogenerated by Thrift Compiler (0.9.3)")
+public class SettableBlobMeta implements org.apache.thrift.TBase<SettableBlobMeta, SettableBlobMeta._Fields>, java.io.Serializable, Cloneable, Comparable<SettableBlobMeta> {
+ private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("SettableBlobMeta");
+
+ private static final org.apache.thrift.protocol.TField ACL_FIELD_DESC = new org.apache.thrift.protocol.TField("acl", org.apache.thrift.protocol.TType.LIST, (short)1);
+ private static final org.apache.thrift.protocol.TField REPLICATION_FACTOR_FIELD_DESC = new org.apache.thrift.protocol.TField("replication_factor", org.apache.thrift.protocol.TType.I32, (short)2);
+
+ private static final Map<Class<? extends IScheme>, SchemeFactory> schemes = new HashMap<Class<? extends IScheme>, SchemeFactory>();
+ static {
+ schemes.put(StandardScheme.class, new SettableBlobMetaStandardSchemeFactory());
+ schemes.put(TupleScheme.class, new SettableBlobMetaTupleSchemeFactory());
+ }
+
+ private List<AccessControl> acl; // required
+ private int replication_factor; // optional
+
+ /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */
+ public enum _Fields implements org.apache.thrift.TFieldIdEnum {
+ ACL((short)1, "acl"),
+ REPLICATION_FACTOR((short)2, "replication_factor");
+
+ private static final Map<String, _Fields> byName = new HashMap<String, _Fields>();
+
+ static {
+ for (_Fields field : EnumSet.allOf(_Fields.class)) {
+ byName.put(field.getFieldName(), field);
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, or null if its not found.
+ */
+ public static _Fields findByThriftId(int fieldId) {
+ switch(fieldId) {
+ case 1: // ACL
+ return ACL;
+ case 2: // REPLICATION_FACTOR
+ return REPLICATION_FACTOR;
+ default:
+ return null;
+ }
+ }
+
+ /**
+ * Find the _Fields constant that matches fieldId, throwing an exception
+ * if it is not found.
+ */
+ public static _Fields findByThriftIdOrThrow(int fieldId) {
+ _Fields fields = findByThriftId(fieldId);
+ if (fields == null) throw new IllegalArgumentException("Field " + fieldId + " doesn't exist!");
+ return fields;
+ }
+
+ /**
+ * Find the _Fields constant that matches name, or null if its not found.
+ */
+ public static _Fields findByName(String name) {
+ return byName.get(name);
+ }
+
+ private final short _thriftId;
+ private final String _fieldName;
+
+ _Fields(short thriftId, String fieldName) {
+ _thriftId = thriftId;
+ _fieldName = fieldName;
+ }
+
+ public short getThriftFieldId() {
+ return _thriftId;
+ }
+
+ public String getFieldName() {
+ return _fieldName;
+ }
+ }
+
+ // isset id assignments
+ private static final int __REPLICATION_FACTOR_ISSET_ID = 0;
+ private byte __isset_bitfield = 0;
+ private static final _Fields optionals[] = {_Fields.REPLICATION_FACTOR};
+ public static final Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap;
+ static {
+ Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class);
+ tmpMap.put(_Fields.ACL, new org.apache.thrift.meta_data.FieldMetaData("acl", org.apache.thrift.TFieldRequirementType.REQUIRED,
+ new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST,
+ new org.apache.thrift.meta_data.StructMetaData(org.apache.thrift.protocol.TType.STRUCT, AccessControl.class))));
+ tmpMap.put(_Fields.REPLICATION_FACTOR, new org.apache.thrift.meta_data.FieldMetaData("replication_factor", org.apache.thrift.TFieldRequirementType.OPTIONAL,
+ new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I32)));
+ metaDataMap = Collections.unmodifiableMap(tmpMap);
+ org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(SettableBlobMeta.class, metaDataMap);
+ }
+
+ public SettableBlobMeta() {
+ }
+
+ public SettableBlobMeta(
+ List<AccessControl> acl)
+ {
+ this();
+ this.acl = acl;
+ }
+
+ /**
+ * Performs a deep copy on <i>other</i>.
+ */
+ public SettableBlobMeta(SettableBlobMeta other) {
+ __isset_bitfield = other.__isset_bitfield;
+ if (other.is_set_acl()) {
+ List<AccessControl> __this__acl = new ArrayList<AccessControl>(other.acl.size());
+ for (AccessControl other_element : other.acl) {
+ __this__acl.add(new AccessControl(other_element));
+ }
+ this.acl = __this__acl;
+ }
+ this.replication_factor = other.replication_factor;
+ }
+
+ public SettableBlobMeta deepCopy() {
+ return new SettableBlobMeta(this);
+ }
+
+ @Override
+ public void clear() {
+ this.acl = null;
+ set_replication_factor_isSet(false);
+ this.replication_factor = 0;
+ }
+
+ public int get_acl_size() {
+ return (this.acl == null) ? 0 : this.acl.size();
+ }
+
+ public java.util.Iterator<AccessControl> get_acl_iterator() {
+ return (this.acl == null) ? null : this.acl.iterator();
+ }
+
+ public void add_to_acl(AccessControl elem) {
+ if (this.acl == null) {
+ this.acl = new ArrayList<AccessControl>();
+ }
+ this.acl.add(elem);
+ }
+
+ public List<AccessControl> get_acl() {
+ return this.acl;
+ }
+
+ public void set_acl(List<AccessControl> acl) {
+ this.acl = acl;
+ }
+
+ public void unset_acl() {
+ this.acl = null;
+ }
+
+ /** Returns true if field acl is set (has been assigned a value) and false otherwise */
+ public boolean is_set_acl() {
+ return this.acl != null;
+ }
+
+ public void set_acl_isSet(boolean value) {
+ if (!value) {
+ this.acl = null;
+ }
+ }
+
+ public int get_replication_factor() {
+ return this.replication_factor;
+ }
+
+ public void set_replication_factor(int replication_factor) {
+ this.replication_factor = replication_factor;
+ set_replication_factor_isSet(true);
+ }
+
+ public void unset_replication_factor() {
+ __isset_bitfield = EncodingUtils.clearBit(__isset_bitfield, __REPLICATION_FACTOR_ISSET_ID);
+ }
+
+ /** Returns true if field replication_factor is set (has been assigned a value) and false otherwise */
+ public boolean is_set_replication_factor() {
+ return EncodingUtils.testBit(__isset_bitfield, __REPLICATION_FACTOR_ISSET_ID);
+ }
+
+ public void set_replication_factor_isSet(boolean value) {
+ __isset_bitfield = EncodingUtils.setBit(__isset_bitfield, __REPLICATION_FACTOR_ISSET_ID, value);
+ }
+
+ public void setFieldValue(_Fields field, Object value) {
+ switch (field) {
+ case ACL:
+ if (value == null) {
+ unset_acl();
+ } else {
+ set_acl((List<AccessControl>)value);
+ }
+ break;
+
+ case REPLICATION_FACTOR:
+ if (value == null) {
+ unset_replication_factor();
+ } else {
+ set_replication_factor((Integer)value);
+ }
+ break;
+
+ }
+ }
+
+ public Object getFieldValue(_Fields field) {
+ switch (field) {
+ case ACL:
+ return get_acl();
+
+ case REPLICATION_FACTOR:
+ return get_replication_factor();
+
+ }
+ throw new IllegalStateException();
+ }
+
+ /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */
+ public boolean isSet(_Fields field) {
+ if (field == null) {
+ throw new IllegalArgumentException();
+ }
+
+ switch (field) {
+ case ACL:
+ return is_set_acl();
+ case REPLICATION_FACTOR:
+ return is_set_replication_factor();
+ }
+ throw new IllegalStateException();
+ }
+
+ @Override
+ public boolean equals(Object that) {
+ if (that == null)
+ return false;
+ if (that instanceof SettableBlobMeta)
+ return this.equals((SettableBlobMeta)that);
+ return false;
+ }
+
+ public boolean equals(SettableBlobMeta that) {
+ if (that == null)
+ return false;
+
+ boolean this_present_acl = true && this.is_set_acl();
+ boolean that_present_acl = true && that.is_set_acl();
+ if (this_present_acl || that_present_acl) {
+ if (!(this_present_acl && that_present_acl))
+ return false;
+ if (!this.acl.equals(that.acl))
+ return false;
+ }
+
+ boolean this_present_replication_factor = true && this.is_set_replication_factor();
+ boolean that_present_replication_factor = true && that.is_set_replication_factor();
+ if (this_present_replication_factor || that_present_replication_factor) {
+ if (!(this_present_replication_factor && that_present_replication_factor))
+ return false;
+ if (this.replication_factor != that.replication_factor)
+ return false;
+ }
+
+ return true;
+ }
+
+ @Override
+ public int hashCode() {
+ List<Object> list = new ArrayList<Object>();
+
+ boolean present_acl = true && (is_set_acl());
+ list.add(present_acl);
+ if (present_acl)
+ list.add(acl);
+
+ boolean present_replication_factor = true && (is_set_replication_factor());
+ list.add(present_replication_factor);
+ if (present_replication_factor)
+ list.add(replication_factor);
+
+ return list.hashCode();
+ }
+
+ @Override
+ public int compareTo(SettableBlobMeta other) {
+ if (!getClass().equals(other.getClass())) {
+ return getClass().getName().compareTo(other.getClass().getName());
+ }
+
+ int lastComparison = 0;
+
+ lastComparison = Boolean.valueOf(is_set_acl()).compareTo(other.is_set_acl());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_acl()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.acl, other.acl);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ lastComparison = Boolean.valueOf(is_set_replication_factor()).compareTo(other.is_set_replication_factor());
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ if (is_set_replication_factor()) {
+ lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.replication_factor, other.replication_factor);
+ if (lastComparison != 0) {
+ return lastComparison;
+ }
+ }
+ return 0;
+ }
+
+ public _Fields fieldForId(int fieldId) {
+ return _Fields.findByThriftId(fieldId);
+ }
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException {
+ schemes.get(iprot.getScheme()).getScheme().read(iprot, this);
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException {
+ schemes.get(oprot.getScheme()).getScheme().write(oprot, this);
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("SettableBlobMeta(");
+ boolean first = true;
+
+ sb.append("acl:");
+ if (this.acl == null) {
+ sb.append("null");
+ } else {
+ sb.append(this.acl);
+ }
+ first = false;
+ if (is_set_replication_factor()) {
+ if (!first) sb.append(", ");
+ sb.append("replication_factor:");
+ sb.append(this.replication_factor);
+ first = false;
+ }
+ sb.append(")");
+ return sb.toString();
+ }
+
+ public void validate() throws org.apache.thrift.TException {
+ // check for required fields
+ if (!is_set_acl()) {
+ throw new org.apache.thrift.protocol.TProtocolException("Required field 'acl' is unset! Struct:" + toString());
+ }
+
+ // check for sub-struct validity
+ }
+
+ private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException {
+ try {
+ write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, ClassNotFoundException {
+ try {
+ // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor.
+ __isset_bitfield = 0;
+ read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in)));
+ } catch (org.apache.thrift.TException te) {
+ throw new java.io.IOException(te);
+ }
+ }
+
+ private static class SettableBlobMetaStandardSchemeFactory implements SchemeFactory {
+ public SettableBlobMetaStandardScheme getScheme() {
+ return new SettableBlobMetaStandardScheme();
+ }
+ }
+
+ private static class SettableBlobMetaStandardScheme extends StandardScheme<SettableBlobMeta> {
+
+ public void read(org.apache.thrift.protocol.TProtocol iprot, SettableBlobMeta struct) throws org.apache.thrift.TException {
+ org.apache.thrift.protocol.TField schemeField;
+ iprot.readStructBegin();
+ while (true)
+ {
+ schemeField = iprot.readFieldBegin();
+ if (schemeField.type == org.apache.thrift.protocol.TType.STOP) {
+ break;
+ }
+ switch (schemeField.id) {
+ case 1: // ACL
+ if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
+ {
+ org.apache.thrift.protocol.TList _list488 = iprot.readListBegin();
+ struct.acl = new ArrayList<AccessControl>(_list488.size);
+ AccessControl _elem489;
+ for (int _i490 = 0; _i490 < _list488.size; ++_i490)
+ {
+ _elem489 = new AccessControl();
+ _elem489.read(iprot);
+ struct.acl.add(_elem489);
+ }
+ iprot.readListEnd();
+ }
+ struct.set_acl_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ case 2: // REPLICATION_FACTOR
+ if (schemeField.type == org.apache.thrift.protocol.TType.I32) {
+ struct.replication_factor = iprot.readI32();
+ struct.set_replication_factor_isSet(true);
+ } else {
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ break;
+ default:
+ org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type);
+ }
+ iprot.readFieldEnd();
+ }
+ iprot.readStructEnd();
+ struct.validate();
+ }
+
+ public void write(org.apache.thrift.protocol.TProtocol oprot, SettableBlobMeta struct) throws org.apache.thrift.TException {
+ struct.validate();
+
+ oprot.writeStructBegin(STRUCT_DESC);
+ if (struct.acl != null) {
+ oprot.writeFieldBegin(ACL_FIELD_DESC);
+ {
+ oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, struct.acl.size()));
+ for (AccessControl _iter491 : struct.acl)
+ {
+ _iter491.write(oprot);
+ }
+ oprot.writeListEnd();
+ }
+ oprot.writeFieldEnd();
+ }
+ if (struct.is_set_replication_factor()) {
+ oprot.writeFieldBegin(REPLICATION_FACTOR_FIELD_DESC);
+ oprot.writeI32(struct.replication_factor);
+ oprot.writeFieldEnd();
+ }
+ oprot.writeFieldStop();
+ oprot.writeStructEnd();
+ }
+
+ }
+
+ private static class SettableBlobMetaTupleSchemeFactory implements SchemeFactory {
+ public SettableBlobMetaTupleScheme getScheme() {
+ return new SettableBlobMetaTupleScheme();
+ }
+ }
+
+ private static class SettableBlobMetaTupleScheme extends TupleScheme<SettableBlobMeta> {
+
+ @Override
+ public void write(org.apache.thrift.protocol.TProtocol prot, SettableBlobMeta struct) throws org.apache.thrift.TException {
+ TTupleProtocol oprot = (TTupleProtocol) prot;
+ {
+ oprot.writeI32(struct.acl.size());
+ for (AccessControl _iter492 : struct.acl)
+ {
+ _iter492.write(oprot);
+ }
+ }
+ BitSet optionals = new BitSet();
+ if (struct.is_set_replication_factor()) {
+ optionals.set(0);
+ }
+ oprot.writeBitSet(optionals, 1);
+ if (struct.is_set_replication_factor()) {
+ oprot.writeI32(struct.replication_factor);
+ }
+ }
+
+ @Override
+ public void read(org.apache.thrift.protocol.TProtocol prot, SettableBlobMeta struct) throws org.apache.thrift.TException {
+ TTupleProtocol iprot = (TTupleProtocol) prot;
+ {
+ org.apache.thrift.protocol.TList _list493 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.acl = new ArrayList<AccessControl>(_list493.size);
+ AccessControl _elem494;
+ for (int _i495 = 0; _i495 < _list493.size; ++_i495)
+ {
+ _elem494 = new AccessControl();
+ _elem494.read(iprot);
+ struct.acl.add(_elem494);
+ }
+ }
+ struct.set_acl_isSet(true);
+ BitSet incoming = iprot.readBitSet(1);
+ if (incoming.get(0)) {
+ struct.replication_factor = iprot.readI32();
+ struct.set_replication_factor_isSet(true);
+ }
+ }
+ }
+
+}
+
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/generated/StormBase.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/StormBase.java b/storm-core/src/jvm/backtype/storm/generated/StormBase.java
index 5f80c59..1bc6ff9 100644
--- a/storm-core/src/jvm/backtype/storm/generated/StormBase.java
+++ b/storm-core/src/jvm/backtype/storm/generated/StormBase.java
@@ -1090,15 +1090,15 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
case 4: // COMPONENT_EXECUTORS
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
- org.apache.thrift.protocol.TMap _map588 = iprot.readMapBegin();
- struct.component_executors = new HashMap<String,Integer>(2*_map588.size);
- String _key589;
- int _val590;
- for (int _i591 = 0; _i591 < _map588.size; ++_i591)
+ org.apache.thrift.protocol.TMap _map604 = iprot.readMapBegin();
+ struct.component_executors = new HashMap<String,Integer>(2*_map604.size);
+ String _key605;
+ int _val606;
+ for (int _i607 = 0; _i607 < _map604.size; ++_i607)
{
- _key589 = iprot.readString();
- _val590 = iprot.readI32();
- struct.component_executors.put(_key589, _val590);
+ _key605 = iprot.readString();
+ _val606 = iprot.readI32();
+ struct.component_executors.put(_key605, _val606);
}
iprot.readMapEnd();
}
@@ -1143,16 +1143,16 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
case 9: // COMPONENT_DEBUG
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
- org.apache.thrift.protocol.TMap _map592 = iprot.readMapBegin();
- struct.component_debug = new HashMap<String,DebugOptions>(2*_map592.size);
- String _key593;
- DebugOptions _val594;
- for (int _i595 = 0; _i595 < _map592.size; ++_i595)
+ org.apache.thrift.protocol.TMap _map608 = iprot.readMapBegin();
+ struct.component_debug = new HashMap<String,DebugOptions>(2*_map608.size);
+ String _key609;
+ DebugOptions _val610;
+ for (int _i611 = 0; _i611 < _map608.size; ++_i611)
{
- _key593 = iprot.readString();
- _val594 = new DebugOptions();
- _val594.read(iprot);
- struct.component_debug.put(_key593, _val594);
+ _key609 = iprot.readString();
+ _val610 = new DebugOptions();
+ _val610.read(iprot);
+ struct.component_debug.put(_key609, _val610);
}
iprot.readMapEnd();
}
@@ -1192,10 +1192,10 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
oprot.writeFieldBegin(COMPONENT_EXECUTORS_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, struct.component_executors.size()));
- for (Map.Entry<String, Integer> _iter596 : struct.component_executors.entrySet())
+ for (Map.Entry<String, Integer> _iter612 : struct.component_executors.entrySet())
{
- oprot.writeString(_iter596.getKey());
- oprot.writeI32(_iter596.getValue());
+ oprot.writeString(_iter612.getKey());
+ oprot.writeI32(_iter612.getValue());
}
oprot.writeMapEnd();
}
@@ -1233,10 +1233,10 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
oprot.writeFieldBegin(COMPONENT_DEBUG_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, struct.component_debug.size()));
- for (Map.Entry<String, DebugOptions> _iter597 : struct.component_debug.entrySet())
+ for (Map.Entry<String, DebugOptions> _iter613 : struct.component_debug.entrySet())
{
- oprot.writeString(_iter597.getKey());
- _iter597.getValue().write(oprot);
+ oprot.writeString(_iter613.getKey());
+ _iter613.getValue().write(oprot);
}
oprot.writeMapEnd();
}
@@ -1286,10 +1286,10 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
if (struct.is_set_component_executors()) {
{
oprot.writeI32(struct.component_executors.size());
- for (Map.Entry<String, Integer> _iter598 : struct.component_executors.entrySet())
+ for (Map.Entry<String, Integer> _iter614 : struct.component_executors.entrySet())
{
- oprot.writeString(_iter598.getKey());
- oprot.writeI32(_iter598.getValue());
+ oprot.writeString(_iter614.getKey());
+ oprot.writeI32(_iter614.getValue());
}
}
}
@@ -1308,10 +1308,10 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
if (struct.is_set_component_debug()) {
{
oprot.writeI32(struct.component_debug.size());
- for (Map.Entry<String, DebugOptions> _iter599 : struct.component_debug.entrySet())
+ for (Map.Entry<String, DebugOptions> _iter615 : struct.component_debug.entrySet())
{
- oprot.writeString(_iter599.getKey());
- _iter599.getValue().write(oprot);
+ oprot.writeString(_iter615.getKey());
+ _iter615.getValue().write(oprot);
}
}
}
@@ -1329,15 +1329,15 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
BitSet incoming = iprot.readBitSet(6);
if (incoming.get(0)) {
{
- org.apache.thrift.protocol.TMap _map600 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, iprot.readI32());
- struct.component_executors = new HashMap<String,Integer>(2*_map600.size);
- String _key601;
- int _val602;
- for (int _i603 = 0; _i603 < _map600.size; ++_i603)
+ org.apache.thrift.protocol.TMap _map616 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I32, iprot.readI32());
+ struct.component_executors = new HashMap<String,Integer>(2*_map616.size);
+ String _key617;
+ int _val618;
+ for (int _i619 = 0; _i619 < _map616.size; ++_i619)
{
- _key601 = iprot.readString();
- _val602 = iprot.readI32();
- struct.component_executors.put(_key601, _val602);
+ _key617 = iprot.readString();
+ _val618 = iprot.readI32();
+ struct.component_executors.put(_key617, _val618);
}
}
struct.set_component_executors_isSet(true);
@@ -1361,16 +1361,16 @@ public class StormBase implements org.apache.thrift.TBase<StormBase, StormBase._
}
if (incoming.get(5)) {
{
- org.apache.thrift.protocol.TMap _map604 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
- struct.component_debug = new HashMap<String,DebugOptions>(2*_map604.size);
- String _key605;
- DebugOptions _val606;
- for (int _i607 = 0; _i607 < _map604.size; ++_i607)
+ org.apache.thrift.protocol.TMap _map620 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRUCT, iprot.readI32());
+ struct.component_debug = new HashMap<String,DebugOptions>(2*_map620.size);
+ String _key621;
+ DebugOptions _val622;
+ for (int _i623 = 0; _i623 < _map620.size; ++_i623)
{
- _key605 = iprot.readString();
- _val606 = new DebugOptions();
- _val606.read(iprot);
- struct.component_debug.put(_key605, _val606);
+ _key621 = iprot.readString();
+ _val622 = new DebugOptions();
+ _val622.read(iprot);
+ struct.component_debug.put(_key621, _val622);
}
}
struct.set_component_debug_isSet(true);
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java b/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
index 2ce5eb9..4d77c08 100644
--- a/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/SupervisorInfo.java
@@ -1085,13 +1085,13 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
case 4: // USED_PORTS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list488 = iprot.readListBegin();
- struct.used_ports = new ArrayList<Long>(_list488.size);
- long _elem489;
- for (int _i490 = 0; _i490 < _list488.size; ++_i490)
+ org.apache.thrift.protocol.TList _list504 = iprot.readListBegin();
+ struct.used_ports = new ArrayList<Long>(_list504.size);
+ long _elem505;
+ for (int _i506 = 0; _i506 < _list504.size; ++_i506)
{
- _elem489 = iprot.readI64();
- struct.used_ports.add(_elem489);
+ _elem505 = iprot.readI64();
+ struct.used_ports.add(_elem505);
}
iprot.readListEnd();
}
@@ -1103,13 +1103,13 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
case 5: // META
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list491 = iprot.readListBegin();
- struct.meta = new ArrayList<Long>(_list491.size);
- long _elem492;
- for (int _i493 = 0; _i493 < _list491.size; ++_i493)
+ org.apache.thrift.protocol.TList _list507 = iprot.readListBegin();
+ struct.meta = new ArrayList<Long>(_list507.size);
+ long _elem508;
+ for (int _i509 = 0; _i509 < _list507.size; ++_i509)
{
- _elem492 = iprot.readI64();
- struct.meta.add(_elem492);
+ _elem508 = iprot.readI64();
+ struct.meta.add(_elem508);
}
iprot.readListEnd();
}
@@ -1121,15 +1121,15 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
case 6: // SCHEDULER_META
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
- org.apache.thrift.protocol.TMap _map494 = iprot.readMapBegin();
- struct.scheduler_meta = new HashMap<String,String>(2*_map494.size);
- String _key495;
- String _val496;
- for (int _i497 = 0; _i497 < _map494.size; ++_i497)
+ org.apache.thrift.protocol.TMap _map510 = iprot.readMapBegin();
+ struct.scheduler_meta = new HashMap<String,String>(2*_map510.size);
+ String _key511;
+ String _val512;
+ for (int _i513 = 0; _i513 < _map510.size; ++_i513)
{
- _key495 = iprot.readString();
- _val496 = iprot.readString();
- struct.scheduler_meta.put(_key495, _val496);
+ _key511 = iprot.readString();
+ _val512 = iprot.readString();
+ struct.scheduler_meta.put(_key511, _val512);
}
iprot.readMapEnd();
}
@@ -1157,15 +1157,15 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
case 9: // RESOURCES_MAP
if (schemeField.type == org.apache.thrift.protocol.TType.MAP) {
{
- org.apache.thrift.protocol.TMap _map498 = iprot.readMapBegin();
- struct.resources_map = new HashMap<String,Double>(2*_map498.size);
- String _key499;
- double _val500;
- for (int _i501 = 0; _i501 < _map498.size; ++_i501)
+ org.apache.thrift.protocol.TMap _map514 = iprot.readMapBegin();
+ struct.resources_map = new HashMap<String,Double>(2*_map514.size);
+ String _key515;
+ double _val516;
+ for (int _i517 = 0; _i517 < _map514.size; ++_i517)
{
- _key499 = iprot.readString();
- _val500 = iprot.readDouble();
- struct.resources_map.put(_key499, _val500);
+ _key515 = iprot.readString();
+ _val516 = iprot.readDouble();
+ struct.resources_map.put(_key515, _val516);
}
iprot.readMapEnd();
}
@@ -1207,9 +1207,9 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
oprot.writeFieldBegin(USED_PORTS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.used_ports.size()));
- for (long _iter502 : struct.used_ports)
+ for (long _iter518 : struct.used_ports)
{
- oprot.writeI64(_iter502);
+ oprot.writeI64(_iter518);
}
oprot.writeListEnd();
}
@@ -1221,9 +1221,9 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
oprot.writeFieldBegin(META_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, struct.meta.size()));
- for (long _iter503 : struct.meta)
+ for (long _iter519 : struct.meta)
{
- oprot.writeI64(_iter503);
+ oprot.writeI64(_iter519);
}
oprot.writeListEnd();
}
@@ -1235,10 +1235,10 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
oprot.writeFieldBegin(SCHEDULER_META_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.scheduler_meta.size()));
- for (Map.Entry<String, String> _iter504 : struct.scheduler_meta.entrySet())
+ for (Map.Entry<String, String> _iter520 : struct.scheduler_meta.entrySet())
{
- oprot.writeString(_iter504.getKey());
- oprot.writeString(_iter504.getValue());
+ oprot.writeString(_iter520.getKey());
+ oprot.writeString(_iter520.getValue());
}
oprot.writeMapEnd();
}
@@ -1262,10 +1262,10 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
oprot.writeFieldBegin(RESOURCES_MAP_FIELD_DESC);
{
oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.DOUBLE, struct.resources_map.size()));
- for (Map.Entry<String, Double> _iter505 : struct.resources_map.entrySet())
+ for (Map.Entry<String, Double> _iter521 : struct.resources_map.entrySet())
{
- oprot.writeString(_iter505.getKey());
- oprot.writeDouble(_iter505.getValue());
+ oprot.writeString(_iter521.getKey());
+ oprot.writeDouble(_iter521.getValue());
}
oprot.writeMapEnd();
}
@@ -1320,28 +1320,28 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
if (struct.is_set_used_ports()) {
{
oprot.writeI32(struct.used_ports.size());
- for (long _iter506 : struct.used_ports)
+ for (long _iter522 : struct.used_ports)
{
- oprot.writeI64(_iter506);
+ oprot.writeI64(_iter522);
}
}
}
if (struct.is_set_meta()) {
{
oprot.writeI32(struct.meta.size());
- for (long _iter507 : struct.meta)
+ for (long _iter523 : struct.meta)
{
- oprot.writeI64(_iter507);
+ oprot.writeI64(_iter523);
}
}
}
if (struct.is_set_scheduler_meta()) {
{
oprot.writeI32(struct.scheduler_meta.size());
- for (Map.Entry<String, String> _iter508 : struct.scheduler_meta.entrySet())
+ for (Map.Entry<String, String> _iter524 : struct.scheduler_meta.entrySet())
{
- oprot.writeString(_iter508.getKey());
- oprot.writeString(_iter508.getValue());
+ oprot.writeString(_iter524.getKey());
+ oprot.writeString(_iter524.getValue());
}
}
}
@@ -1354,10 +1354,10 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
if (struct.is_set_resources_map()) {
{
oprot.writeI32(struct.resources_map.size());
- for (Map.Entry<String, Double> _iter509 : struct.resources_map.entrySet())
+ for (Map.Entry<String, Double> _iter525 : struct.resources_map.entrySet())
{
- oprot.writeString(_iter509.getKey());
- oprot.writeDouble(_iter509.getValue());
+ oprot.writeString(_iter525.getKey());
+ oprot.writeDouble(_iter525.getValue());
}
}
}
@@ -1377,41 +1377,41 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
}
if (incoming.get(1)) {
{
- org.apache.thrift.protocol.TList _list510 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
- struct.used_ports = new ArrayList<Long>(_list510.size);
- long _elem511;
- for (int _i512 = 0; _i512 < _list510.size; ++_i512)
+ org.apache.thrift.protocol.TList _list526 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
+ struct.used_ports = new ArrayList<Long>(_list526.size);
+ long _elem527;
+ for (int _i528 = 0; _i528 < _list526.size; ++_i528)
{
- _elem511 = iprot.readI64();
- struct.used_ports.add(_elem511);
+ _elem527 = iprot.readI64();
+ struct.used_ports.add(_elem527);
}
}
struct.set_used_ports_isSet(true);
}
if (incoming.get(2)) {
{
- org.apache.thrift.protocol.TList _list513 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
- struct.meta = new ArrayList<Long>(_list513.size);
- long _elem514;
- for (int _i515 = 0; _i515 < _list513.size; ++_i515)
+ org.apache.thrift.protocol.TList _list529 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.I64, iprot.readI32());
+ struct.meta = new ArrayList<Long>(_list529.size);
+ long _elem530;
+ for (int _i531 = 0; _i531 < _list529.size; ++_i531)
{
- _elem514 = iprot.readI64();
- struct.meta.add(_elem514);
+ _elem530 = iprot.readI64();
+ struct.meta.add(_elem530);
}
}
struct.set_meta_isSet(true);
}
if (incoming.get(3)) {
{
- org.apache.thrift.protocol.TMap _map516 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.scheduler_meta = new HashMap<String,String>(2*_map516.size);
- String _key517;
- String _val518;
- for (int _i519 = 0; _i519 < _map516.size; ++_i519)
+ org.apache.thrift.protocol.TMap _map532 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.scheduler_meta = new HashMap<String,String>(2*_map532.size);
+ String _key533;
+ String _val534;
+ for (int _i535 = 0; _i535 < _map532.size; ++_i535)
{
- _key517 = iprot.readString();
- _val518 = iprot.readString();
- struct.scheduler_meta.put(_key517, _val518);
+ _key533 = iprot.readString();
+ _val534 = iprot.readString();
+ struct.scheduler_meta.put(_key533, _val534);
}
}
struct.set_scheduler_meta_isSet(true);
@@ -1426,15 +1426,15 @@ public class SupervisorInfo implements org.apache.thrift.TBase<SupervisorInfo, S
}
if (incoming.get(6)) {
{
- org.apache.thrift.protocol.TMap _map520 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.DOUBLE, iprot.readI32());
- struct.resources_map = new HashMap<String,Double>(2*_map520.size);
- String _key521;
- double _val522;
- for (int _i523 = 0; _i523 < _map520.size; ++_i523)
+ org.apache.thrift.protocol.TMap _map536 = new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.DOUBLE, iprot.readI32());
+ struct.resources_map = new HashMap<String,Double>(2*_map536.size);
+ String _key537;
+ double _val538;
+ for (int _i539 = 0; _i539 < _map536.size; ++_i539)
{
- _key521 = iprot.readString();
- _val522 = iprot.readDouble();
- struct.resources_map.put(_key521, _val522);
+ _key537 = iprot.readString();
+ _val538 = iprot.readDouble();
+ struct.resources_map.put(_key537, _val538);
}
}
struct.set_resources_map_isSet(true);
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/generated/TopologyHistoryInfo.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/generated/TopologyHistoryInfo.java b/storm-core/src/jvm/backtype/storm/generated/TopologyHistoryInfo.java
index cced456..cc8ef3c 100644
--- a/storm-core/src/jvm/backtype/storm/generated/TopologyHistoryInfo.java
+++ b/storm-core/src/jvm/backtype/storm/generated/TopologyHistoryInfo.java
@@ -364,13 +364,13 @@ public class TopologyHistoryInfo implements org.apache.thrift.TBase<TopologyHist
case 1: // TOPO_IDS
if (schemeField.type == org.apache.thrift.protocol.TType.LIST) {
{
- org.apache.thrift.protocol.TList _list698 = iprot.readListBegin();
- struct.topo_ids = new ArrayList<String>(_list698.size);
- String _elem699;
- for (int _i700 = 0; _i700 < _list698.size; ++_i700)
+ org.apache.thrift.protocol.TList _list714 = iprot.readListBegin();
+ struct.topo_ids = new ArrayList<String>(_list714.size);
+ String _elem715;
+ for (int _i716 = 0; _i716 < _list714.size; ++_i716)
{
- _elem699 = iprot.readString();
- struct.topo_ids.add(_elem699);
+ _elem715 = iprot.readString();
+ struct.topo_ids.add(_elem715);
}
iprot.readListEnd();
}
@@ -396,9 +396,9 @@ public class TopologyHistoryInfo implements org.apache.thrift.TBase<TopologyHist
oprot.writeFieldBegin(TOPO_IDS_FIELD_DESC);
{
oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, struct.topo_ids.size()));
- for (String _iter701 : struct.topo_ids)
+ for (String _iter717 : struct.topo_ids)
{
- oprot.writeString(_iter701);
+ oprot.writeString(_iter717);
}
oprot.writeListEnd();
}
@@ -429,9 +429,9 @@ public class TopologyHistoryInfo implements org.apache.thrift.TBase<TopologyHist
if (struct.is_set_topo_ids()) {
{
oprot.writeI32(struct.topo_ids.size());
- for (String _iter702 : struct.topo_ids)
+ for (String _iter718 : struct.topo_ids)
{
- oprot.writeString(_iter702);
+ oprot.writeString(_iter718);
}
}
}
@@ -443,13 +443,13 @@ public class TopologyHistoryInfo implements org.apache.thrift.TBase<TopologyHist
BitSet incoming = iprot.readBitSet(1);
if (incoming.get(0)) {
{
- org.apache.thrift.protocol.TList _list703 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
- struct.topo_ids = new ArrayList<String>(_list703.size);
- String _elem704;
- for (int _i705 = 0; _i705 < _list703.size; ++_i705)
+ org.apache.thrift.protocol.TList _list719 = new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, iprot.readI32());
+ struct.topo_ids = new ArrayList<String>(_list719.size);
+ String _elem720;
+ for (int _i721 = 0; _i721 < _list719.size; ++_i721)
{
- _elem704 = iprot.readString();
- struct.topo_ids.add(_elem704);
+ _elem720 = iprot.readString();
+ struct.topo_ids.add(_elem720);
}
}
struct.set_topo_ids_isSet(true);
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/localizer/LocalResource.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/localizer/LocalResource.java b/storm-core/src/jvm/backtype/storm/localizer/LocalResource.java
new file mode 100644
index 0000000..beebbd2
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/localizer/LocalResource.java
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.localizer;
+
+/**
+ * Local Resource requested by the topology
+ */
+public class LocalResource {
+ private String _blobKey;
+ private boolean _uncompress;
+
+ public LocalResource(String keyname, boolean uncompress) {
+ _blobKey = keyname;
+ _uncompress = uncompress;
+ }
+
+ public String getBlobName() {
+ return _blobKey;
+ }
+
+ public boolean shouldUncompress() {
+ return _uncompress;
+ }
+
+ @Override
+ public String toString() {
+ return "Key: " + _blobKey + " uncompress: " + _uncompress;
+ }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/localizer/LocalizedResource.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/localizer/LocalizedResource.java b/storm-core/src/jvm/backtype/storm/localizer/LocalizedResource.java
new file mode 100644
index 0000000..3550d36
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/localizer/LocalizedResource.java
@@ -0,0 +1,130 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.localizer;
+
+import backtype.storm.utils.Utils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicLong;
+
+/**
+ * Represents a resource that is localized on the supervisor.
+ * A localized resource has a .current symlink to the current version file which is named
+ * filename.{current version}. There is also a filename.version which contains the latest version.
+ */
+public class LocalizedResource {
+ public static final Logger LOG = LoggerFactory.getLogger(LocalizedResource.class);
+
+ // filesystem path to the resource
+ private final String _localPath;
+ private final String _versionFilePath;
+ private final String _symlinkPath;
+ private final String _key;
+ private final boolean _uncompressed;
+ // _size of the resource
+ private long _size = -1;
+ // queue of topologies referencing resource
+ private final Set<String> _ref;
+ // last access time of the resource -> increment when topology finishes using it
+ private final AtomicLong _lastAccessTime = new AtomicLong(currentTime());
+
+ public LocalizedResource(String key, String fileLoc, boolean uncompressed) {
+ _ref = new HashSet<String>();
+ _localPath = fileLoc;
+ _versionFilePath = Utils.constructVersionFileName(fileLoc);
+ _symlinkPath = Utils.constructBlobCurrentSymlinkName(fileLoc);
+ _uncompressed = uncompressed;
+ _key = key;
+ // we trust that the file exists
+ _size = Utils.getDU(new File(getFilePathWithVersion()));
+ LOG.debug("size of {} is: {}", fileLoc, _size);
+ }
+
+ // create local resource and add reference
+ public LocalizedResource(String key, String fileLoc, boolean uncompressed, String topo) {
+ this(key, fileLoc, uncompressed);
+ _ref.add(topo);
+ }
+
+ public boolean isUncompressed() {
+ return _uncompressed;
+ }
+
+ public String getKey() {
+ return _key;
+ }
+
+ public String getCurrentSymlinkPath() {
+ return _symlinkPath;
+ }
+
+ public String getVersionFilePath() {
+ return _versionFilePath;
+ }
+
+ public String getFilePathWithVersion() {
+ long version = Utils.localVersionOfBlob(_localPath);
+ return Utils.constructBlobWithVersionFileName(_localPath, version);
+ }
+
+ public String getFilePath() {
+ return _localPath;
+ }
+
+ public void addReference(String topo) {
+ _ref.add(topo);
+ }
+
+ public void removeReference(String topo) {
+ if (!_ref.remove(topo)) {
+ LOG.warn("Tried to remove a reference to a topology that doesn't use this resource");
+ }
+ setTimestamp();
+ }
+
+ // The last access time is only valid if the resource doesn't have any references.
+ public long getLastAccessTime() {
+ return _lastAccessTime.get();
+ }
+
+ // for testing
+ protected void setSize(long size) {
+ _size = size;
+ }
+
+ public long getSize() {
+ return _size;
+ }
+
+ private void setTimestamp() {
+ _lastAccessTime.set(currentTime());
+ }
+
+ public int getRefCount() {
+ return _ref.size();
+ }
+
+ private long currentTime() {
+ return System.nanoTime();
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceRetentionSet.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceRetentionSet.java b/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceRetentionSet.java
new file mode 100644
index 0000000..29cc5a2
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceRetentionSet.java
@@ -0,0 +1,140 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.localizer;
+
+import org.apache.commons.io.FileUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.SortedMap;
+import java.util.TreeMap;
+/**
+ * A set of resources that we can look at to see which ones we retain and which ones should be
+ * removed.  Resources that still have references are always retained; unreferenced resources
+ * are evicted least-recently-used first until the cache fits in the target size again.
+ */
+public class LocalizedResourceRetentionSet {
+  public static final Logger LOG = LoggerFactory.getLogger(LocalizedResourceRetentionSet.class);
+  // bytes freed (or scheduled to be freed) by the current cleanup() pass
+  private long _delSize;
+  // total bytes of every resource added via addResources()
+  private long _currentSize;
+  // targetSize in Bytes
+  private long _targetSize;
+  // eviction candidates (no references), LRU first, mapped to the set that owns them
+  private final SortedMap<LocalizedResource, LocalizedResourceSet> _noReferences;
+
+  LocalizedResourceRetentionSet(long targetSize) {
+    this(targetSize, new LRUComparator());
+  }
+
+  LocalizedResourceRetentionSet(long targetSize, Comparator<? super LocalizedResource> cmp) {
+    this(targetSize, new TreeMap<LocalizedResource, LocalizedResourceSet>(cmp));
+  }
+
+  LocalizedResourceRetentionSet(long targetSize,
+      SortedMap<LocalizedResource, LocalizedResourceSet> retain) {
+    this._noReferences = retain;
+    this._targetSize = targetSize;
+  }
+
+  // for testing
+  protected int getSizeWithNoReferences() {
+    return _noReferences.size();
+  }
+
+  // Adds the size of every resource in the iterator to the running total and queues the
+  // unreferenced ones as eviction candidates.
+  protected void addResourcesForSet(Iterator<LocalizedResource> setIter, LocalizedResourceSet set) {
+    // the original mixed two aliases of the same iterator inside a for header; a plain
+    // while loop over the single iterator is equivalent and far less error-prone
+    while (setIter.hasNext()) {
+      LocalizedResource lrsrc = setIter.next();
+      _currentSize += lrsrc.getSize();
+      if (lrsrc.getRefCount() > 0) {
+        // always retain resources in use
+        continue;
+      }
+      LOG.debug("adding {} to be checked for cleaning", lrsrc.getKey());
+      _noReferences.put(lrsrc, set);
+    }
+  }
+
+  public void addResources(LocalizedResourceSet set) {
+    addResourcesForSet(set.getLocalFilesIterator(), set);
+    addResourcesForSet(set.getLocalArchivesIterator(), set);
+  }
+
+  // Deletes unreferenced resources, least-recently-used first, until the remaining size
+  // fits within the target size or there are no candidates left.
+  public void cleanup() {
+    LOG.debug("cleanup target size: {} current size is: {}", _targetSize, _currentSize);
+    for (Iterator<Map.Entry<LocalizedResource, LocalizedResourceSet>> i =
+             _noReferences.entrySet().iterator();
+         _currentSize - _delSize > _targetSize && i.hasNext();) {
+      Map.Entry<LocalizedResource, LocalizedResourceSet> rsrc = i.next();
+      LocalizedResource resource = rsrc.getKey();
+      LocalizedResourceSet set = rsrc.getValue();
+      // remove from the owning set first so the resource is not handed out mid-delete
+      if (resource != null && set.remove(resource)) {
+        if (deleteResource(resource)) {
+          _delSize += resource.getSize();
+          LOG.info("deleting: " + resource.getFilePath() + " size of: " + resource.getSize());
+          i.remove();
+        } else {
+          // since it failed to delete add it back so it gets retried
+          set.addResource(resource.getKey(), resource, resource.isUncompressed());
+        }
+      }
+    }
+  }
+
+  // Removes the versioned file/directory, its current symlink and its version file from
+  // local disk.  Returns false (after logging the cause) if any deletion failed.
+  protected boolean deleteResource(LocalizedResource resource){
+    try {
+      String fileWithVersion = resource.getFilePathWithVersion();
+      String currentSymlinkName = resource.getCurrentSymlinkPath();
+      String versionFile = resource.getVersionFilePath();
+      File deletePath = new File(fileWithVersion);
+      if (resource.isUncompressed()) {
+        // this doesn't follow symlinks, which is what we want
+        FileUtils.deleteDirectory(deletePath);
+      } else {
+        Files.delete(deletePath.toPath());
+      }
+      Files.delete(new File(currentSymlinkName).toPath());
+      Files.delete(new File(versionFile).toPath());
+      return true;
+    } catch (IOException e) {
+      // pass the exception as the last SLF4J argument so the stack trace is kept;
+      // the original dropped the cause entirely, making failures undiagnosable
+      LOG.warn("Could not delete: {}", resource.getFilePath(), e);
+    }
+    return false;
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder sb = new StringBuilder();
+    sb.append("Cache: ").append(_currentSize).append(", ");
+    sb.append("Deleted: ").append(_delSize);
+    return sb.toString();
+  }
+
+  // Orders resources by last access time (oldest first), breaking ties with the identity
+  // hash so that two distinct resources never compare as equal inside the TreeMap.
+  static class LRUComparator implements Comparator<LocalizedResource> {
+    public int compare(LocalizedResource r1, LocalizedResource r2) {
+      // Long.compare / Integer.compare avoid the overflow a plain subtraction can hit
+      int ret = Long.compare(r1.getLastAccessTime(), r2.getLastAccessTime());
+      if (ret == 0) {
+        ret = Integer.compare(System.identityHashCode(r1), System.identityHashCode(r2));
+      }
+      return ret;
+    }
+  }
+}
http://git-wip-us.apache.org/repos/asf/storm/blob/7029aee5/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceSet.java
----------------------------------------------------------------------
diff --git a/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceSet.java b/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceSet.java
new file mode 100644
index 0000000..fa5e54c
--- /dev/null
+++ b/storm-core/src/jvm/backtype/storm/localizer/LocalizedResourceSet.java
@@ -0,0 +1,101 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package backtype.storm.localizer;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Iterator;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+/**
+ * Set of localized resources for a specific user, split into plain files and archives
+ * that are uncompressed locally.
+ */
+public class LocalizedResourceSet {
+
+  public static final Logger LOG = LoggerFactory.getLogger(LocalizedResourceSet.class);
+  // Key to LocalizedResource mapping for files
+  private final ConcurrentMap<String, LocalizedResource> _localrsrcFiles;
+  // Key to LocalizedResource mapping for files to be uncompressed
+  private final ConcurrentMap<String, LocalizedResource> _localrsrcArchives;
+  // user these resources are localized for; assigned once in the constructor
+  private final String _user;
+
+  LocalizedResourceSet(String user) {
+    this._localrsrcFiles = new ConcurrentHashMap<String, LocalizedResource>();
+    this._localrsrcArchives = new ConcurrentHashMap<String, LocalizedResource>();
+    this._user = user;
+  }
+
+  // Chooses the backing map for the requested flavor of resource; replaces the
+  // if (uncompress) branching that was repeated in four methods below.
+  private ConcurrentMap<String, LocalizedResource> mapFor(boolean uncompress) {
+    return uncompress ? _localrsrcArchives : _localrsrcFiles;
+  }
+
+  public String getUser() {
+    return _user;
+  }
+
+  // Total number of resources tracked, files plus archives.
+  public int getSize() {
+    return _localrsrcFiles.size() + _localrsrcArchives.size();
+  }
+
+  // Returns the resource stored under the given name, or null if it isn't tracked.
+  public LocalizedResource get(String name, boolean uncompress) {
+    return mapFor(uncompress).get(name);
+  }
+
+  // NOTE(review): putIfAbsent means an existing entry is NOT replaced, so despite the
+  // name this only inserts when the key is missing -- confirm this is intended.
+  public void updateResource(String resourceName, LocalizedResource updatedResource,
+      boolean uncompress) {
+    mapFor(uncompress).putIfAbsent(resourceName, updatedResource);
+  }
+
+  // Adds (or replaces) the resource stored under the given name.
+  public void addResource(String resourceName, LocalizedResource newResource, boolean uncompress) {
+    mapFor(uncompress).put(resourceName, newResource);
+  }
+
+  public boolean exists(String resourceName, boolean uncompress) {
+    // containsKey is equivalent to get(...) != null here: ConcurrentHashMap never holds nulls
+    return mapFor(uncompress).containsKey(resourceName);
+  }
+
+  // Removes the resource from whichever map holds it; returns true if it was present.
+  public boolean remove(LocalizedResource resource) {
+    return mapFor(resource.isUncompressed()).remove(resource.getKey()) != null;
+  }
+
+  public Iterator<LocalizedResource> getLocalFilesIterator() {
+    return _localrsrcFiles.values().iterator();
+  }
+
+  public Iterator<LocalizedResource> getLocalArchivesIterator() {
+    return _localrsrcArchives.values().iterator();
+  }
+}