Posted to commits@geode.apache.org by kl...@apache.org on 2017/05/12 22:18:11 UTC

[08/51] [abbrv] [partial] geode git commit: GEODE-2632: change dependencies on GemFireCacheImpl to InternalCache
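
The change applied across these files is mechanical: call sites keep using the GemFireCacheImpl static accessors (getForPdx, getInstance, getExisting) but hold the returned cache through the InternalCache interface instead of the concrete class. A minimal sketch of the resulting shape, built only from calls that appear in the hunks below (the PdxRegistryLookup class and lookupPdxRegistry method are illustrative names, not part of this commit):

    import org.apache.geode.internal.cache.GemFireCacheImpl;
    import org.apache.geode.internal.cache.InternalCache;
    import org.apache.geode.pdx.internal.TypeRegistry;

    class PdxRegistryLookup {
      // Before this commit the local variable (and many fields and parameters)
      // were typed as GemFireCacheImpl; afterwards only the static accessor
      // still names the concrete class, and the rest of the PDX code depends
      // on the InternalCache interface.
      TypeRegistry lookupPdxRegistry() {
        InternalCache cache = GemFireCacheImpl
            .getForPdx("PDX registry is unavailable because the Cache has been closed.");
        return cache.getPdxRegistry();
      }
    }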

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
index 6d2e906..08e3364 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
@@ -14,15 +14,13 @@
  */
 package org.apache.geode.pdx.internal;
 
-import java.nio.ByteBuffer;
 import java.util.Date;
 
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
-import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSourceFactory;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.PdxInstanceFactory;
-import org.apache.geode.pdx.PdxUnreadFields;
 
 /**
  * PdxInstances created with this factory can never be deserialized but you can access their fields
@@ -32,26 +30,27 @@ import org.apache.geode.pdx.PdxUnreadFields;
  * PdxType is expensive since it can never figure out it is already defined without doing an
  * expensive check in the type registry. We should optimize this before making this a public
  * feature.
- *
  */
 public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
 
   private final PdxWriterImpl writer;
+
   private boolean created = false;
 
   private PdxInstanceFactoryImpl(String name, boolean expectDomainClass) {
-    PdxOutputStream os = new PdxOutputStream();
-    PdxType pt = new PdxType(name, expectDomainClass);
-    GemFireCacheImpl gfc = GemFireCacheImpl
+    PdxOutputStream pdxOutputStream = new PdxOutputStream();
+    PdxType pdxType = new PdxType(name, expectDomainClass);
+    InternalCache internalCache = GemFireCacheImpl
         .getForPdx("PDX registry is unavailable because the Cache has been closed.");
-    TypeRegistry tr = gfc.getPdxRegistry();
-    this.writer = new PdxWriterImpl(pt, tr, os);
+    TypeRegistry pdxRegistry = internalCache.getPdxRegistry();
+    this.writer = new PdxWriterImpl(pdxType, pdxRegistry, pdxOutputStream);
   }
 
   public static PdxInstanceFactory newCreator(String name, boolean expectDomainClass) {
     return new PdxInstanceFactoryImpl(name, expectDomainClass);
   }
 
+  @Override
   public PdxInstance create() {
     if (this.created) {
       throw new IllegalStateException("The create method can only be called once.");
@@ -61,135 +60,149 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
     return this.writer.makePdxInstance();
   }
 
+  @Override
   public PdxInstanceFactory writeChar(String fieldName, char value) {
     this.writer.writeChar(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeBoolean(String fieldName, boolean value) {
     this.writer.writeBoolean(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeByte(String fieldName, byte value) {
     this.writer.writeByte(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeShort(String fieldName, short value) {
     this.writer.writeShort(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeInt(String fieldName, int value) {
     this.writer.writeInt(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeLong(String fieldName, long value) {
     this.writer.writeLong(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeFloat(String fieldName, float value) {
     this.writer.writeFloat(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeDouble(String fieldName, double value) {
     this.writer.writeDouble(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeDate(String fieldName, Date date) {
-    this.writer.writeDate(fieldName, date);
+  @Override
+  public PdxInstanceFactory writeDate(String fieldName, Date value) {
+    this.writer.writeDate(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeString(String fieldName, String value) {
     this.writer.writeString(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeObject(String fieldName, Object object) {
-    return writeObject(fieldName, object, false);
+  @Override
+  public PdxInstanceFactory writeObject(String fieldName, Object value) {
+    return writeObject(fieldName, value, false);
   }
 
-  public PdxInstanceFactory writeBooleanArray(String fieldName, boolean[] array) {
-    this.writer.writeBooleanArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeBooleanArray(String fieldName, boolean[] value) {
+    this.writer.writeBooleanArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeCharArray(String fieldName, char[] array) {
-    this.writer.writeCharArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeCharArray(String fieldName, char[] value) {
+    this.writer.writeCharArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeByteArray(String fieldName, byte[] array) {
-    this.writer.writeByteArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeByteArray(String fieldName, byte[] value) {
+    this.writer.writeByteArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeShortArray(String fieldName, short[] array) {
-    this.writer.writeShortArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeShortArray(String fieldName, short[] value) {
+    this.writer.writeShortArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeIntArray(String fieldName, int[] array) {
-    this.writer.writeIntArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeIntArray(String fieldName, int[] value) {
+    this.writer.writeIntArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeLongArray(String fieldName, long[] array) {
-    this.writer.writeLongArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeLongArray(String fieldName, long[] value) {
+    this.writer.writeLongArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeFloatArray(String fieldName, float[] array) {
-    this.writer.writeFloatArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeFloatArray(String fieldName, float[] value) {
+    this.writer.writeFloatArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeDoubleArray(String fieldName, double[] array) {
-    this.writer.writeDoubleArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeDoubleArray(String fieldName, double[] value) {
+    this.writer.writeDoubleArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeStringArray(String fieldName, String[] array) {
-    this.writer.writeStringArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeStringArray(String fieldName, String[] value) {
+    this.writer.writeStringArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeObjectArray(String fieldName, Object[] array) {
-    return writeObjectArray(fieldName, array, false);
-  }
-
-  public PdxInstanceFactory writeUnreadFields(PdxUnreadFields unread) {
-    this.writer.writeUnreadFields(unread);
-    return this;
+  @Override
+  public PdxInstanceFactory writeObjectArray(String fieldName, Object[] value) {
+    return writeObjectArray(fieldName, value, false);
   }
 
-  public PdxInstanceFactory writeRaw(PdxField field, ByteBuffer rawData) {
-    this.writer.writeRawField(field, ByteSourceFactory.create(rawData));
-    return this;
-  }
-
-
-  public PdxInstanceFactory writeArrayOfByteArrays(String fieldName, byte[][] array) {
-    this.writer.writeArrayOfByteArrays(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeArrayOfByteArrays(String fieldName, byte[][] value) {
+    this.writer.writeArrayOfByteArrays(fieldName, value);
     return this;
   }
 
+  @Override
   public <CT, VT extends CT> PdxInstanceFactory writeField(String fieldName, VT fieldValue,
       Class<CT> fieldType) {
     return writeField(fieldName, fieldValue, fieldType, false);
   }
 
+  @Override
   public PdxInstanceFactory markIdentityField(String fieldName) {
     this.writer.markIdentityField(fieldName);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeObject(String fieldName, Object value, boolean checkPortability) {
     if (InternalDataSerializer.is662SerializationEnabled()) {
       boolean alreadyInProgress = InternalDataSerializer.isPdxSerializationInProgress();
@@ -210,6 +223,7 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeObjectArray(String fieldName, Object[] value,
       boolean checkPortability) {
     if (InternalDataSerializer.is662SerializationEnabled()) {
@@ -230,6 +244,7 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
     return this;
   }
 
+  @Override
   public <CT, VT extends CT> PdxInstanceFactory writeField(String fieldName, VT fieldValue,
       Class<CT> fieldType, boolean checkPortability) {
     if (InternalDataSerializer.is662SerializationEnabled()) {
@@ -251,14 +266,14 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
   }
 
   public static PdxInstance createPdxEnum(String className, String enumName, int enumOrdinal,
-      GemFireCacheImpl gfc) {
+      InternalCache internalCache) {
     if (className == null) {
       throw new IllegalArgumentException("className must not be null");
     }
     if (enumName == null) {
       throw new IllegalArgumentException("enumName must not be null");
     }
-    TypeRegistry tr = gfc.getPdxRegistry();
+    TypeRegistry tr = internalCache.getPdxRegistry();
     EnumInfo ei = new EnumInfo(className, enumName, enumOrdinal);
     return ei.getPdxInstance(tr.defineEnum(ei));
   }

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
index f8acaad..d429601 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceImpl.java
@@ -14,8 +14,6 @@
  */
 package org.apache.geode.pdx.internal;
 
-import static org.apache.logging.log4j.message.MapMessage.MapFormat.JSON;
-
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
@@ -30,8 +28,8 @@ import java.util.TreeSet;
 
 import com.fasterxml.jackson.databind.DeserializationFeature;
 import com.fasterxml.jackson.databind.ObjectMapper;
-
 import org.apache.commons.lang.StringUtils;
+
 import org.apache.geode.InternalGemFireException;
 import org.apache.geode.distributed.internal.DMStats;
 import org.apache.geode.internal.ClassPathLoader;
@@ -39,6 +37,7 @@ import org.apache.geode.internal.DSCODE;
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.Sendable;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.tcp.ByteBufferInputStream;
 import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSource;
 import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSourceFactory;
@@ -52,11 +51,9 @@ import org.apache.geode.pdx.WritablePdxInstance;
  * it must call {@link #getUnmodifiableReader()} and access the super class state using it. This
  * class could be changed to not extend PdxReaderImpl but to instead have an instance variable that
  * is a PdxReaderImpl but that would cause this class to use more memory.
- * 
+ * <p>
  * We do not use this normal java io serialization when serializing this class in GemFire because
  * Sendable takes precedence over Serializable.
- * 
- *
  */
 public class PdxInstanceImpl extends PdxReaderImpl
     implements PdxInstance, Sendable, ConvertableToBytes {
@@ -87,6 +84,7 @@ public class PdxInstanceImpl extends PdxReaderImpl
    * equality rule, where hash code can be same for non-equal objects.
    */
   private static final int UNUSED_HASH_CODE = 0;
+
   private transient volatile int cachedHashCode = UNUSED_HASH_CODE;
 
   private static final ThreadLocal<Boolean> pdxGetObjectInProgress = new ThreadLocal<Boolean>();
@@ -144,9 +142,9 @@ public class PdxInstanceImpl extends PdxReaderImpl
   private PdxWriterImpl convertToTypeWithNoDeletedFields(PdxReaderImpl ur) {
     PdxOutputStream os = new PdxOutputStream();
     PdxType pt = new PdxType(ur.getPdxType().getClassName(), !ur.getPdxType().getNoDomainClass());
-    GemFireCacheImpl gfc = GemFireCacheImpl
+    InternalCache cache = GemFireCacheImpl
         .getForPdx("PDX registry is unavailable because the Cache has been closed.");
-    TypeRegistry tr = gfc.getPdxRegistry();
+    TypeRegistry tr = cache.getPdxRegistry();
     PdxWriterImpl writer = new PdxWriterImpl(pt, tr, os);
     for (PdxField field : pt.getFields()) {
       if (!field.isDeleted()) {
@@ -328,15 +326,9 @@ public class PdxInstanceImpl extends PdxReaderImpl
       return true;
 
     if (obj == null) {
-      // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#0 o1=<" + this + "> o2=<" +
-      // obj + ">");
       return false;
     }
     if (!(obj instanceof PdxInstanceImpl)) {
-      // if (!result) {
-      // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#1 o1=<" + this + "> o2=<" +
-      // obj + ">");
-      // }
       return false;
     }
     final PdxInstanceImpl other = (PdxInstanceImpl) obj;
@@ -344,8 +336,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
     PdxReaderImpl ur1 = getUnmodifiableReader();
 
     if (!ur1.getPdxType().getClassName().equals(ur2.getPdxType().getClassName())) {
-      // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#2 o1=<" + this + "> o2=<" +
-      // obj + ">");
       return false;
     }
 
@@ -359,7 +349,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
       addDefaultFields(otherFields, myFields);
     }
 
-
     Iterator<PdxField> myFieldIterator = myFields.iterator();
     Iterator<PdxField> otherFieldIterator = otherFields.iterator();
     while (myFieldIterator.hasNext()) {
@@ -390,8 +379,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
           ByteSource myBuffer = ur1.getRaw(myType);
           ByteSource otherBuffer = ur2.getRaw(otherType);
           if (!myBuffer.equals(otherBuffer)) {
-            // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#4 o1=<" + this + ">
-            // o2=<" + obj + ">");
             return false;
           }
         }
@@ -401,8 +388,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
           Object[] myArray = ur1.readObjectArray(myType);
           Object[] otherArray = ur2.readObjectArray(otherType);
           if (!Arrays.deepEquals(myArray, otherArray)) {
-            // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#5 o1=<" + this + ">
-            // o2=<" + obj + ">");
             return false;
           }
         }
@@ -413,43 +398,29 @@ public class PdxInstanceImpl extends PdxReaderImpl
           Object otherObject = ur2.readObject(otherType);
           if (myObject != otherObject) {
             if (myObject == null) {
-              // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#6 o1=<" + this + ">
-              // o2=<" + obj + ">");
               return false;
             }
             if (otherObject == null) {
-              // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#7 o1=<" + this + ">
-              // o2=<" + obj + ">");
               return false;
             }
             if (myObject.getClass().isArray()) { // for bug 42976
               Class<?> myComponentType = myObject.getClass().getComponentType();
               Class<?> otherComponentType = otherObject.getClass().getComponentType();
               if (!myComponentType.equals(otherComponentType)) {
-                // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#8 o1=<" + this + ">
-                // o2=<" + obj + ">");
                 return false;
               }
               if (myComponentType.isPrimitive()) {
                 ByteSource myBuffer = getRaw(myType);
                 ByteSource otherBuffer = other.getRaw(otherType);
                 if (!myBuffer.equals(otherBuffer)) {
-                  // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#9 o1=<" + this +
-                  // "> o2=<" + obj + ">");
                   return false;
                 }
               } else {
                 if (!Arrays.deepEquals((Object[]) myObject, (Object[]) otherObject)) {
-                  // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#10 o1=<" + this +
-                  // "> o2=<" + obj + ">");
                   return false;
                 }
               }
             } else if (!myObject.equals(otherObject)) {
-              // GemFireCacheImpl.getInstance().getLogger().info("DEBUG equals#11 fn=" +
-              // myType.getFieldName() + " myFieldClass=" + myObject.getClass() + "
-              // otherFieldCLass=" + otherObject.getClass() + " o1=<" + this + "> o2=<" + obj + ">"
-              // + "myObj=<" + myObject + "> otherObj=<" + otherObject + ">");
               return false;
             }
           }
@@ -463,7 +434,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
     return true;
   }
 
-
   /**
    * Any fields that are in otherFields but not in myFields are added to myFields as defaults. When
    * adding fields they are inserted in the natural sort order. Note: myFields may be modified by
@@ -483,9 +453,7 @@ public class PdxInstanceImpl extends PdxReaderImpl
     StringBuilder result = new StringBuilder();
     PdxReaderImpl ur = getUnmodifiableReader();
     result.append("PDX[").append(ur.getPdxType().getTypeId()).append(",")
-        .append(ur.getPdxType().getClassName())
-        // .append(",limit=").append(this.dis.size())
-        .append("]{");
+        .append(ur.getPdxType().getClassName()).append("]{");
     boolean firstElement = true;
     for (PdxField fieldType : ur.getPdxType().getSortedIdentityFields()) {
       if (firstElement) {
@@ -494,8 +462,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
         result.append(", ");
       } ;
       result.append(fieldType.getFieldName());
-      // result.append(':').append(fieldType.getTypeIdString()); // DEBUG
-      // result.append(':').append(getAbsolutePosition(fieldType)); // DEBUG
       result.append("=");
       try {
         // TODO check to see if getField returned an array and if it did use Arrays.deepToString
@@ -663,7 +629,6 @@ public class PdxInstanceImpl extends PdxReaderImpl
     return getUnmodifiableReader(fieldName).readRawField(fieldName);
   }
 
-
   public Object getDefaultValueIfFieldExistsInAnyPdxVersions(String fieldName, String className)
       throws FieldNotFoundInPdxVersion {
     PdxType pdxType =

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxReaderImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxReaderImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxReaderImpl.java
index 822fc99..801157e 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxReaderImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxReaderImpl.java
@@ -24,6 +24,7 @@ import org.apache.geode.InternalGemFireException;
 import org.apache.geode.internal.DSCODE;
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.tcp.ByteBufferInputStream;
 import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSource;
@@ -560,7 +561,6 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
   }
 
   /**
-   * 
    * @param idx of the variable length field
    * @return the offset to the variable length field
    */
@@ -697,10 +697,10 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
     // only create a tracking one if we might need it
     UnreadPdxType unreadLocalPdxType = null;
     boolean needToTrackReads = TESTHOOK_TRACKREADS;
-    GemFireCacheImpl gfc = GemFireCacheImpl
+    InternalCache cache = GemFireCacheImpl
         .getForPdx("PDX registry is unavailable because the Cache has been closed.");
-    TypeRegistry tr = gfc.getPdxRegistry();
-    if (!gfc.getPdxIgnoreUnreadFields()) {
+    TypeRegistry tr = cache.getPdxRegistry();
+    if (!cache.getPdxIgnoreUnreadFields()) {
       PdxType localPdxType = tr.getExistingTypeForClass(pdxClass);
       if (localPdxType != null) {
         if (getPdxType().getTypeId() != localPdxType.getTypeId()
@@ -736,7 +736,7 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
       }
       ((PdxSerializable) result).fromData(pdxReader);
     } else {
-      PdxSerializer pdxSerializer = gfc.getPdxSerializer();
+      PdxSerializer pdxSerializer = cache.getPdxSerializer();
       if (pdxSerializer != null) {
         result = pdxSerializer.fromData(pdxClass, pdxReader);
         if (result == null) {
@@ -843,8 +843,6 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
   public void orderedDeserialize(Object obj, AutoClassInfo ci) {
     PdxReaderImpl reader = prepForOrderedReading();
     for (PdxFieldWrapper f : ci.getFields()) {
-      // System.out.println("DEBUG reading field=" + f.getField().getName() + " offset=" +
-      // reader.dis.position());
       f.orderedDeserialize(reader, obj);
     }
   }
@@ -866,8 +864,6 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
   }
 
   /**
-   * 
-   * @param field
    * @return PdxString if field is a String otherwise invokes {@link #readField(String)}
    */
   public Object readRawField(String field) {
@@ -888,9 +884,6 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
   /**
    * This method checks whether Object field is String type. If its String then it returns PdxString
    * otherwise null.
-   * 
-   * @param ft
-   * @return
    */
   private PdxString getPdxStringFromObjectField(PdxField ft) {
     if (ft.getFieldType() == FieldType.OBJECT) {
@@ -912,8 +905,6 @@ public class PdxReaderImpl implements InternalPdxReader, java.io.Serializable {
   }
 
   /**
-   * 
-   * @param ft
    * @return returns {@link PdxString}
    */
   public PdxString readPdxString(PdxField ft) {

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxUnreadData.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxUnreadData.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxUnreadData.java
index 82883b1..d2621b5 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxUnreadData.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxUnreadData.java
@@ -14,15 +14,13 @@
  */
 package org.apache.geode.pdx.internal;
 
-import java.nio.ByteBuffer;
-
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSource;
 import org.apache.geode.pdx.PdxFieldAlreadyExistsException;
 import org.apache.geode.pdx.PdxUnreadFields;
 
 /**
- * 
  * @since GemFire 6.6
  */
 public class PdxUnreadData implements PdxUnreadFields {
@@ -31,6 +29,7 @@ public class PdxUnreadData implements PdxUnreadFields {
    * This is the original type of the blob that we deserialized and did not read some of its fields.
    */
   private UnreadPdxType unreadType;
+
   private byte[][] unreadData;
 
   public PdxUnreadData() {
@@ -104,10 +103,10 @@ public class PdxUnreadData implements PdxUnreadFields {
     // This method is only called by CopyHelper which is public and does not require that a Cache
     // exists.
     // So we need to call getInstance instead of getExisting.
-    GemFireCacheImpl gfc = GemFireCacheImpl.getInstance();
-    if (gfc == null)
+    InternalCache cache = GemFireCacheImpl.getInstance();
+    if (cache == null)
       return;
-    TypeRegistry tr = gfc.getPdxRegistry();
+    TypeRegistry tr = cache.getPdxRegistry();
     PdxUnreadData ud = tr.getUnreadData(o);
     if (ud != null && !ud.isEmpty()) {
       tr.putUnreadData(copy, ud);

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxWriterImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxWriterImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxWriterImpl.java
index 2aee53c..61be1ad 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxWriterImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxWriterImpl.java
@@ -14,6 +14,11 @@
  */
 package org.apache.geode.pdx.internal;
 
+import java.io.DataOutput;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.Date;
+
 import org.apache.geode.InternalGemFireException;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.internal.DSCODE;
@@ -21,14 +26,16 @@ import org.apache.geode.internal.HeapDataOutputStream;
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
 import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSource;
-import org.apache.geode.pdx.*;
+import org.apache.geode.pdx.FieldType;
+import org.apache.geode.pdx.PdxFieldAlreadyExistsException;
+import org.apache.geode.pdx.PdxFieldDoesNotExistException;
+import org.apache.geode.pdx.PdxInstance;
+import org.apache.geode.pdx.PdxSerializable;
+import org.apache.geode.pdx.PdxSerializationException;
+import org.apache.geode.pdx.PdxUnreadFields;
+import org.apache.geode.pdx.PdxWriter;
 import org.apache.geode.pdx.internal.AutoSerializableManager.AutoClassInfo;
 
-import java.io.DataOutput;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Date;
-
 /**
  * A new instance of this class is created for each (nested) instance of {@link PdxSerializable}.
  * But it may share the underlying instance of {@link HeapDataOutputStream} with other instances of
@@ -47,6 +54,7 @@ public class PdxWriterImpl implements PdxWriter {
    * tr is no longer final because it is initialized late when using a PdxSerializer.
    */
   private TypeRegistry tr;
+
   private final Object pdx;
   private final PdxOutputStream os;
   private final AutoClassInfo aci;
@@ -55,16 +63,20 @@ public class PdxWriterImpl implements PdxWriter {
    * Offsets to the variable length fields.
    */
   private int[] vlfOffsets;
+
   /**
    * The number of variable length fields that need an offset. The first VLF does not need an
    * offset.
    */
   private int vlfCount = 0;
+
   private boolean hasSeenFirstVlf = false;
+
   /**
    * The offset into the hdos to the header.
    */
   protected final int headerOffset;
+
   private PdxUnreadData unreadData;
 
   private PdxType existingType;
@@ -80,6 +92,7 @@ public class PdxWriterImpl implements PdxWriter {
    */
   private static final boolean sysPropDoExtraPdxValidation =
       Boolean.getBoolean(DistributionConfig.GEMFIRE_PREFIX + "validatePdxWriters");
+
   private boolean doExtraValidation = sysPropDoExtraPdxValidation;
 
   public PdxWriterImpl(TypeRegistry tr, Object pdx, PdxOutputStream out) {
@@ -543,7 +556,6 @@ public class PdxWriterImpl implements PdxWriter {
   }
 
   /**
-   * 
    * @return the offset to the byte of the first field
    */
   private int getBaseOffset() {
@@ -561,11 +573,7 @@ public class PdxWriterImpl implements PdxWriter {
     int fieldDataSize = getCurrentOffset();
     // Take the list of offsets and append it in reverse order.
     byte sizeOfOffset = getSizeOfOffset(this.vlfCount, fieldDataSize);
-    // System.out.println("Size of each offset: " + sizeOfOffset +
-    // " byte(s), curPos: " + this.curPos + ", numOfOffsets: " +
-    // this.offsetIndex);
     for (int i = (this.vlfCount - 1); i >= 0; i--) {
-      // System.out.println("offset[" + i + "]: " + this.offsets[i]);
       switch (sizeOfOffset) {
         case 1:
           this.os.write((byte) this.vlfOffsets[i]);
@@ -612,7 +620,6 @@ public class PdxWriterImpl implements PdxWriter {
     return this.os.toByteArray();
   }
 
-
   private void markVariableField() {
     if (!this.hasSeenFirstVlf) {
       this.hasSeenFirstVlf = true;
@@ -829,7 +836,6 @@ public class PdxWriterImpl implements PdxWriter {
     }
   }
 
-
   private HeapDataOutputStream.LongUpdater lu;
 
   private void writeHeader() {

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
index b4fa33e..065255b 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
@@ -23,14 +23,15 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.logging.log4j.Logger;
+
 import org.apache.geode.InternalGemFireError;
 import org.apache.geode.InternalGemFireException;
 import org.apache.geode.cache.AttributesFactory;
-import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheWriterException;
 import org.apache.geode.cache.DataPolicy;
+import org.apache.geode.cache.DiskStore;
 import org.apache.geode.cache.EntryEvent;
-import org.apache.geode.cache.Operation;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.RegionAttributes;
 import org.apache.geode.cache.RegionExistsException;
@@ -50,12 +51,9 @@ import org.apache.geode.distributed.internal.InternalDistributedSystem;
 import org.apache.geode.distributed.internal.locks.DLockService;
 import org.apache.geode.internal.CopyOnWriteHashSet;
 import org.apache.geode.internal.cache.DiskStoreImpl;
-import org.apache.geode.internal.cache.EntryEventImpl;
-import org.apache.geode.internal.cache.EnumListenerEvent;
-import org.apache.geode.internal.cache.EventID;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.InternalRegionArguments;
-import org.apache.geode.internal.cache.LocalRegion;
 import org.apache.geode.internal.cache.TXManagerImpl;
 import org.apache.geode.internal.cache.TXStateProxy;
 import org.apache.geode.internal.logging.LogService;
@@ -63,23 +61,19 @@ import org.apache.geode.internal.util.concurrent.CopyOnWriteHashMap;
 import org.apache.geode.pdx.JSONFormatter;
 import org.apache.geode.pdx.PdxInitializationException;
 import org.apache.geode.pdx.PdxRegistryMismatchException;
-import org.apache.logging.log4j.Logger;
 
-/**
- *
- */
 public class PeerTypeRegistration implements TypeRegistration {
   private static final Logger logger = LogService.getLogger();
 
-  /**
-   * 
-   */
   private static final int MAX_TRANSACTION_FAILURES = 10;
+
   public static final String LOCK_SERVICE_NAME = "__PDX";
+
   /**
    * The region name. Public for tests only.
    */
   public static final String REGION_NAME = "PdxTypes";
+
   public static final String REGION_FULL_PATH = "/" + REGION_NAME;
   public static final int PLACE_HOLDER_FOR_TYPE_ID = 0xFFFFFF;
   public static final int PLACE_HOLDER_FOR_DS_ID = 0xFF000000;
@@ -88,7 +82,7 @@ public class PeerTypeRegistration implements TypeRegistration {
   private final int maxTypeId;
   private volatile DistributedLockService dls;
   private final Object dlsLock = new Object();
-  private GemFireCacheImpl cache;
+  private InternalCache cache;
 
   /**
    * The region where the PDX metadata is stored. Because this region is transactional for our
@@ -104,17 +98,18 @@ public class PeerTypeRegistration implements TypeRegistration {
    */
   private Map<PdxType, Integer> typeToId =
       Collections.synchronizedMap(new HashMap<PdxType, Integer>());
+
   private Map<EnumInfo, EnumId> enumToId =
       Collections.synchronizedMap(new HashMap<EnumInfo, EnumId>());
+
   private final Map<String, Set<PdxType>> classToType =
       new CopyOnWriteHashMap<String, Set<PdxType>>();
 
   private volatile boolean typeRegistryInUse = false;
 
-  public PeerTypeRegistration(GemFireCacheImpl cache) {
+  public PeerTypeRegistration(InternalCache cache) {
     this.cache = cache;
 
-
     int distributedSystemId =
         cache.getInternalDistributedSystem().getDistributionManager().getDistributedSystemId();
     if (distributedSystemId == -1) {
@@ -378,7 +373,7 @@ public class PeerTypeRegistration implements TypeRegistration {
     verifyConfiguration();
     Integer existingId = typeToId.get(newType);
     if (existingId != null) {
-      return existingId.intValue();
+      return existingId;
     }
     lock();
     try {
@@ -392,8 +387,7 @@ public class PeerTypeRegistration implements TypeRegistration {
 
       updateIdToTypeRegion(newType);
 
-      typeToId.put(newType, Integer.valueOf(id));
-      // this.cache.getLogger().info("Defining: " + newType, new RuntimeException("STACK"));
+      typeToId.put(newType, id);
 
       return newType.getTypeId();
     } finally {
@@ -411,11 +405,11 @@ public class PeerTypeRegistration implements TypeRegistration {
 
   private void updateRegion(Object k, Object v) {
     Region<Object, Object> r = getIdToType();
-    Cache c = (Cache) r.getRegionService();
+    InternalCache cache = (InternalCache) r.getRegionService();
 
     checkDistributedTypeRegistryState();
 
-    TXManagerImpl txManager = (TXManagerImpl) c.getCacheTransactionManager();
+    TXManagerImpl txManager = (TXManagerImpl) cache.getCacheTransactionManager();
     TXStateProxy currentState = suspendTX();
     boolean state = useUDPMessagingIfNecessary();
     try {
@@ -459,7 +453,6 @@ public class PeerTypeRegistration implements TypeRegistration {
     } finally {
       resumeTX(currentState);
     }
-
   }
 
   public void addRemoteType(int typeId, PdxType type) {
@@ -537,10 +530,10 @@ public class PeerTypeRegistration implements TypeRegistration {
   }
 
   public boolean hasPersistentRegions() {
-    Collection<DiskStoreImpl> diskStores = cache.listDiskStoresIncludingRegionOwned();
+    Collection<DiskStore> diskStores = cache.listDiskStoresIncludingRegionOwned();
     boolean hasPersistentRegions = false;
-    for (DiskStoreImpl store : diskStores) {
-      hasPersistentRegions |= store.hasPersistedData();
+    for (DiskStore store : diskStores) {
+      hasPersistentRegions |= ((DiskStoreImpl) store).hasPersistedData();
     }
     return hasPersistentRegions;
   }
@@ -574,7 +567,7 @@ public class PeerTypeRegistration implements TypeRegistration {
         } else {
           PdxType foundType = (PdxType) v;
           Integer id = (Integer) k;
-          int tmpDsId = PLACE_HOLDER_FOR_DS_ID & id.intValue();
+          int tmpDsId = PLACE_HOLDER_FOR_DS_ID & id;
           if (tmpDsId == this.dsId) {
             totalPdxTypeIdInDS++;
           }
@@ -633,10 +626,9 @@ public class PeerTypeRegistration implements TypeRegistration {
   }
 
   private TXStateProxy suspendTX() {
-    Cache c = (Cache) getIdToType().getRegionService();
-    TXManagerImpl txManager = (TXManagerImpl) c.getCacheTransactionManager();
-    TXStateProxy currentState = txManager.internalSuspend();
-    return currentState;
+    InternalCache cache = (InternalCache) getIdToType().getRegionService();
+    TXManagerImpl txManager = (TXManagerImpl) cache.getCacheTransactionManager();
+    return txManager.internalSuspend();
   }
 
   private void resumeTX(TXStateProxy state) {
@@ -756,11 +748,8 @@ public class PeerTypeRegistration implements TypeRegistration {
     return enums;
   }
 
-
   /**
    * adds a PdxType for a field to a {@code className => Set<PdxType>} map
-   * 
-   * @param type
    */
   private void updateClassToTypeMap(PdxType type) {
     if (type != null) {
@@ -790,14 +779,14 @@ public class PeerTypeRegistration implements TypeRegistration {
     return null;
   }
 
-  /*
+  /**
    * For testing purpose
    */
   public Map<String, Set<PdxType>> getClassToType() {
     return classToType;
   }
 
-  /*
+  /**
    * test hook
    */
   @Override
@@ -823,7 +812,7 @@ public class PeerTypeRegistration implements TypeRegistration {
   }
 
   public static int getPdxRegistrySize() {
-    GemFireCacheImpl cache = GemFireCacheImpl.getExisting();
+    InternalCache cache = GemFireCacheImpl.getExisting();
     if (cache == null) {
       return 0;
     }

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
index ebca878..e245b34 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
@@ -14,6 +14,13 @@
  */
 package org.apache.geode.pdx.internal;
 
+import static java.lang.Integer.*;
+
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.logging.log4j.Logger;
+
 import org.apache.geode.cache.CacheClosedException;
 import org.apache.geode.cache.DiskStore;
 import org.apache.geode.cache.DiskStoreFactory;
@@ -21,7 +28,7 @@ import org.apache.geode.cache.wan.GatewaySender;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.internal.Assert;
 import org.apache.geode.internal.InternalDataSerializer;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.util.concurrent.CopyOnWriteHashMap;
@@ -29,11 +36,6 @@ import org.apache.geode.internal.util.concurrent.CopyOnWriteWeakHashMap;
 import org.apache.geode.pdx.PdxSerializationException;
 import org.apache.geode.pdx.PdxSerializer;
 import org.apache.geode.pdx.ReflectionBasedAutoSerializer;
-import org.apache.logging.log4j.Logger;
-
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReference;
-
 
 public class TypeRegistry {
   private static final Logger logger = LogService.getLogger();
@@ -41,31 +43,39 @@ public class TypeRegistry {
   private static final boolean DISABLE_TYPE_REGISTRY =
       Boolean.getBoolean(DistributionConfig.GEMFIRE_PREFIX + "TypeRegistry.DISABLE_PDX_REGISTRY");
 
-  private final Map<Integer, PdxType> idToType = new CopyOnWriteHashMap<Integer, PdxType>();
-  private final Map<PdxType, Integer> typeToId = new CopyOnWriteHashMap<PdxType, Integer>();
-  private final Map<Class<?>, PdxType> localTypeIds =
-      new CopyOnWriteWeakHashMap<Class<?>, PdxType>();
+  private final Map<Integer, PdxType> idToType = new CopyOnWriteHashMap<>();
+
+  private final Map<PdxType, Integer> typeToId = new CopyOnWriteHashMap<>();
+
+  private final Map<Class<?>, PdxType> localTypeIds = new CopyOnWriteWeakHashMap<>();
+
   private final Map<Class<?>, Map<Integer, UnreadPdxType>> localTypeIdMaps =
-      new CopyOnWriteWeakHashMap<Class<?>, Map<Integer, UnreadPdxType>>();
+      new CopyOnWriteWeakHashMap<>();
+
   private final WeakConcurrentIdentityHashMap<Object, PdxUnreadData> unreadDataMap =
       WeakConcurrentIdentityHashMap.make();
-  private final Map<Integer, EnumInfo> idToEnum = new CopyOnWriteHashMap<Integer, EnumInfo>();
-  private final Map<EnumInfo, Integer> enumInfoToId = new CopyOnWriteHashMap<EnumInfo, Integer>();
-  private final Map<Enum<?>, Integer> localEnumIds = new CopyOnWriteWeakHashMap<Enum<?>, Integer>();
+
+  private final Map<Integer, EnumInfo> idToEnum = new CopyOnWriteHashMap<>();
+
+  private final Map<EnumInfo, Integer> enumInfoToId = new CopyOnWriteHashMap<>();
+
+  private final Map<Enum<?>, Integer> localEnumIds = new CopyOnWriteWeakHashMap<>();
+
   private final TypeRegistration distributedTypeRegistry;
-  private final GemFireCacheImpl cache;
 
-  public TypeRegistry(GemFireCacheImpl cache, boolean disableTypeRegistry) {
+  private final InternalCache cache;
+
+  public TypeRegistry(InternalCache cache, boolean disableTypeRegistry) {
     this.cache = cache;
 
     if (DISABLE_TYPE_REGISTRY || disableTypeRegistry) {
-      distributedTypeRegistry = new NullTypeRegistration();
+      this.distributedTypeRegistry = new NullTypeRegistration();
     } else if (cache.hasPool()) {
-      distributedTypeRegistry = new ClientTypeRegistration(cache);
+      this.distributedTypeRegistry = new ClientTypeRegistration(cache);
     } else if (LonerTypeRegistration.isIndeterminateLoner(cache)) {
-      distributedTypeRegistry = new LonerTypeRegistration(cache);
+      this.distributedTypeRegistry = new LonerTypeRegistration(cache);
     } else {
-      distributedTypeRegistry = new PeerTypeRegistration(cache);
+      this.distributedTypeRegistry = new PeerTypeRegistration(cache);
     }
   }
 
@@ -77,7 +87,7 @@ public class TypeRegistry {
     this.idToType.clear();
     this.idToEnum.clear();
     this.enumInfoToId.clear();
-    distributedTypeRegistry.testClearRegistry();
+    this.distributedTypeRegistry.testClearRegistry();
   }
 
   public void testClearLocalTypeRegistry() {
@@ -86,17 +96,11 @@ public class TypeRegistry {
     this.localEnumIds.clear();
   }
 
-  public static boolean mayNeedDiskStore(GemFireCacheImpl cache) {
-    if (DISABLE_TYPE_REGISTRY) {
-      return false;
-    } else if (cache.hasPool()) {
-      return false;
-    } else {
-      return cache.getPdxPersistent();
-    }
+  public static boolean mayNeedDiskStore(InternalCache cache) {
+    return !DISABLE_TYPE_REGISTRY && !cache.hasPool() && cache.getPdxPersistent();
   }
 
-  public static String getPdxDiskStoreName(GemFireCacheImpl cache) {
+  public static String getPdxDiskStoreName(InternalCache cache) {
     if (!mayNeedDiskStore(cache)) {
       return null;
     } else {
@@ -109,9 +113,9 @@ public class TypeRegistry {
   }
 
   public void initialize() {
-    if (!cache.getPdxPersistent() || cache.getPdxDiskStore() == null
-        || cache.findDiskStore(cache.getPdxDiskStore()) != null) {
-      distributedTypeRegistry.initialize();
+    if (!this.cache.getPdxPersistent() || this.cache.getPdxDiskStore() == null
+        || this.cache.findDiskStore(this.cache.getPdxDiskStore()) != null) {
+      this.distributedTypeRegistry.initialize();
     }
   }
 
@@ -146,40 +150,39 @@ public class TypeRegistry {
     return null;
   }
 
-
-  public PdxType getExistingType(Object o) {
+  PdxType getExistingType(Object o) {
     return getExistingTypeForClass(o.getClass());
   }
 
-  public PdxType getExistingTypeForClass(Class<?> c) {
-    return this.localTypeIds.get(c);
+  public PdxType getExistingTypeForClass(Class<?> aClass) {
+    return this.localTypeIds.get(aClass);
   }
 
   /**
    * Returns the local type that should be used for deserializing blobs of the given typeId for the
    * given local class. Returns null if no such local type exists.
    */
-  public UnreadPdxType getExistingTypeForClass(Class<?> c, int typeId) {
-    Map<Integer, UnreadPdxType> m = this.localTypeIdMaps.get(c);
-    if (m != null) {
-      return m.get(typeId);
+  UnreadPdxType getExistingTypeForClass(Class<?> aClass, int typeId) {
+    Map<Integer, UnreadPdxType> map = this.localTypeIdMaps.get(aClass);
+    if (map != null) {
+      return map.get(typeId);
     } else {
       return null;
     }
   }
 
-  public void defineUnreadType(Class<?> c, UnreadPdxType unreadPdxType) {
+  void defineUnreadType(Class<?> aClass, UnreadPdxType unreadPdxType) {
     int typeId = unreadPdxType.getTypeId();
     // even though localTypeIdMaps is copy on write we need to sync it
     // during write to safely update the nested map.
     // We make the nested map copy-on-write so that readers don't need to sync.
     synchronized (this.localTypeIdMaps) {
-      Map<Integer, UnreadPdxType> m = this.localTypeIdMaps.get(c);
-      if (m == null) {
-        m = new CopyOnWriteHashMap<Integer, UnreadPdxType>();
-        this.localTypeIdMaps.put(c, m);
+      Map<Integer, UnreadPdxType> map = this.localTypeIdMaps.get(aClass);
+      if (map == null) {
+        map = new CopyOnWriteHashMap<Integer, UnreadPdxType>();
+        this.localTypeIdMaps.put(aClass, map);
       }
-      m.put(typeId, unreadPdxType);
+      map.put(typeId, unreadPdxType);
     }
   }
 
@@ -189,11 +192,12 @@ public class TypeRegistry {
   public int defineType(PdxType newType) {
     Integer existingId = this.typeToId.get(newType);
     if (existingId != null) {
-      int eid = existingId.intValue();
+      int eid = existingId;
       newType.setTypeId(eid);
       return eid;
     }
-    int id = distributedTypeRegistry.defineType(newType);
+
+    int id = this.distributedTypeRegistry.defineType(newType);
     newType.setTypeId(id);
     PdxType oldType = this.idToType.get(id);
     if (oldType == null) {
@@ -228,7 +232,7 @@ public class TypeRegistry {
   /**
    * Create a type id for a type that was generated locally.
    */
-  public PdxType defineLocalType(Object o, PdxType newType) {
+  PdxType defineLocalType(Object o, PdxType newType) {
     if (o != null) {
       PdxType t = getExistingType(o);
       if (t != null) {
@@ -244,7 +248,6 @@ public class TypeRegistry {
     return newType;
   }
 
-
   /**
    * Test hook that returns the most recently allocated type id
    * 
@@ -253,31 +256,32 @@ public class TypeRegistry {
    * @return the most recently allocated type id
    */
   public int getLastAllocatedTypeId() {
-    return distributedTypeRegistry.getLastAllocatedTypeId();
+    return this.distributedTypeRegistry.getLastAllocatedTypeId();
   }
 
   public TypeRegistration getTypeRegistration() {
-    return distributedTypeRegistry;
+    return this.distributedTypeRegistry;
   }
 
   public void gatewaySenderStarted(GatewaySender gatewaySender) {
-    if (distributedTypeRegistry != null) {
-      distributedTypeRegistry.gatewaySenderStarted(gatewaySender);
+    if (this.distributedTypeRegistry != null) {
+      this.distributedTypeRegistry.gatewaySenderStarted(gatewaySender);
     }
   }
 
   public void creatingDiskStore(DiskStore dsi) {
-    if (cache.getPdxDiskStore() != null && dsi.getName().equals(cache.getPdxDiskStore())) {
-      distributedTypeRegistry.initialize();
+    if (this.cache.getPdxDiskStore() != null
+        && dsi.getName().equals(this.cache.getPdxDiskStore())) {
+      this.distributedTypeRegistry.initialize();
     }
   }
 
   public void creatingPersistentRegion() {
-    distributedTypeRegistry.creatingPersistentRegion();
+    this.distributedTypeRegistry.creatingPersistentRegion();
   }
 
   public void creatingPool() {
-    distributedTypeRegistry.creatingPool();
+    this.distributedTypeRegistry.creatingPool();
   }
 
   // test hook
@@ -285,23 +289,24 @@ public class TypeRegistry {
     this.localTypeIds.remove(o.getClass());
   }
 
-  public PdxUnreadData getUnreadData(Object o) {
+  PdxUnreadData getUnreadData(Object o) {
     return this.unreadDataMap.get(o);
   }
 
-  public void putUnreadData(Object o, PdxUnreadData ud) {
+  void putUnreadData(Object o, PdxUnreadData ud) {
     this.unreadDataMap.put(o, ud);
   }
 
-  private static final AtomicReference<PdxSerializer> pdxSerializer =
-      new AtomicReference<PdxSerializer>(null);
-  private static final AtomicReference<AutoSerializableManager> asm =
-      new AtomicReference<AutoSerializableManager>(null);
+  private static final AtomicReference<PdxSerializer> pdxSerializer = new AtomicReference<>(null);
+
+  private static final AtomicReference<AutoSerializableManager> asm = new AtomicReference<>(null);
+
   /**
    * To fix bug 45116 we want any attempt to get the PdxSerializer after it has been closed to fail
    * with an exception.
    */
   private static volatile boolean open = false;
+
   /**
    * If the pdxSerializer is ever set to a non-null value then set this to true. It gets reset to
    * false when init() is called. This was added to fix bug 45116.
@@ -357,10 +362,10 @@ public class TypeRegistry {
     if (v != null) {
       Integer id = this.localEnumIds.get(v);
       if (id != null) {
-        result = id.intValue();
+        result = id;
       } else {
-        result = distributedTypeRegistry.getEnumId(v);
-        id = Integer.valueOf(result);
+        result = this.distributedTypeRegistry.getEnumId(v);
+        id = valueOf(result);
         this.localEnumIds.put(v, id);
         EnumInfo ei = new EnumInfo(v);
         this.idToEnum.put(id, ei);
@@ -385,9 +390,9 @@ public class TypeRegistry {
   public int defineEnum(EnumInfo newInfo) {
     Integer existingId = this.enumInfoToId.get(newInfo);
     if (existingId != null) {
-      return existingId.intValue();
+      return existingId;
     }
-    int id = distributedTypeRegistry.defineEnum(newInfo);
+    int id = this.distributedTypeRegistry.defineEnum(newInfo);
     EnumInfo oldInfo = this.idToEnum.get(id);
     if (oldInfo == null) {
       this.idToEnum.put(id, newInfo);
@@ -444,21 +449,20 @@ public class TypeRegistry {
    * server side distributed system is cycled
    */
   public void clear() {
-    if (distributedTypeRegistry.isClient()) {
-      idToType.clear();
-      typeToId.clear();
-      localTypeIds.clear();
-      localTypeIdMaps.clear();
-      unreadDataMap.clear();
-      idToEnum.clear();
-      enumInfoToId.clear();
-      localEnumIds.clear();
+    if (this.distributedTypeRegistry.isClient()) {
+      this.idToType.clear();
+      this.typeToId.clear();
+      this.localTypeIds.clear();
+      this.localTypeIdMaps.clear();
+      this.unreadDataMap.clear();
+      this.idToEnum.clear();
+      this.enumInfoToId.clear();
+      this.localEnumIds.clear();
       AutoSerializableManager autoSerializer = getAutoSerializableManager();
       if (autoSerializer != null) {
         autoSerializer.resetCachedTypes();
       }
     }
-
   }
 
   /**
@@ -467,7 +471,7 @@ public class TypeRegistry {
    * @return the types
    */
   public Map<Integer, PdxType> typeMap() {
-    return distributedTypeRegistry.types();
+    return this.distributedTypeRegistry.types();
   }
 
   /**
@@ -476,7 +480,7 @@ public class TypeRegistry {
    * @return the enums
    */
   public Map<Integer, EnumInfo> enumMap() {
-    return distributedTypeRegistry.enums();
+    return this.distributedTypeRegistry.enums();
   }
 
   /**
@@ -487,8 +491,8 @@ public class TypeRegistry {
    * @return PdxType having the field or null if not found
    * 
    */
-  public PdxType getPdxTypeForField(String fieldName, String className) {
-    return distributedTypeRegistry.getPdxTypeForField(fieldName, className);
+  PdxType getPdxTypeForField(String fieldName, String className) {
+    return this.distributedTypeRegistry.getPdxTypeForField(fieldName, className);
   }
 
   public void addImportedType(int typeId, PdxType importedType) {
@@ -522,10 +526,10 @@ public class TypeRegistry {
    * Get the size of the the type registry in this local member
    */
   public int getLocalSize() {
-    int result = distributedTypeRegistry.getLocalSize();
+    int result = this.distributedTypeRegistry.getLocalSize();
     if (result == 0) {
       // If this is the client, go ahead and return the number of cached types we have
-      return idToType.size();
+      return this.idToType.size();
     }
     return result;
   }

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/WritablePdxInstanceImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/WritablePdxInstanceImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/WritablePdxInstanceImpl.java
index cb080b6..1006d96 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/WritablePdxInstanceImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/WritablePdxInstanceImpl.java
@@ -19,6 +19,7 @@ import java.util.Date;
 
 import org.apache.geode.InternalGemFireException;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxFieldDoesNotExistException;
 import org.apache.geode.pdx.PdxFieldTypeMismatchException;
 import org.apache.geode.pdx.WritablePdxInstance;
@@ -80,9 +81,9 @@ public class WritablePdxInstanceImpl extends PdxInstanceImpl implements Writable
       if (getPdxType().getHasDeletedField()) {
         // Need a new type that does not have the deleted field
         PdxType pt = new PdxType(getPdxType().getClassName(), !getPdxType().getNoDomainClass());
-        GemFireCacheImpl gfc = GemFireCacheImpl
+        InternalCache cache = GemFireCacheImpl
             .getForPdx("PDX registry is unavailable because the Cache has been closed.");
-        TypeRegistry tr = gfc.getPdxRegistry();
+        TypeRegistry tr = cache.getPdxRegistry();
         writer = new PdxWriterImpl(pt, tr, os);
       } else {
         writer = new PdxWriterImpl(getPdxType(), os);

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceHelper.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceHelper.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceHelper.java
index 39d16a5..fc21bf0 100755
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceHelper.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceHelper.java
@@ -20,13 +20,13 @@ import java.math.BigInteger;
 import org.apache.logging.log4j.Logger;
 
 import org.apache.geode.cache.CacheFactory;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.pdx.JSONFormatter;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.internal.PdxInstanceFactoryImpl;
 
-/*
+/**
  * This class is intermediate class to create PdxInstance.
  */
 public class PdxInstanceHelper implements JSONToPdxMapper {
@@ -37,15 +37,19 @@ public class PdxInstanceHelper implements JSONToPdxMapper {
   PdxInstance m_pdxInstance;
   String m_PdxName;// when pdx is member, else null if part of lists
 
+  private InternalCache getCache() {
+    return (InternalCache) CacheFactory.getAnyInstance();
+  }
+
   public PdxInstanceHelper(String className, JSONToPdxMapper parent) {
-    GemFireCacheImpl gci = (GemFireCacheImpl) CacheFactory.getAnyInstance();
+    InternalCache cache = getCache();
     if (logger.isTraceEnabled()) {
       logger.trace("ClassName {}", className);
     }
     m_PdxName = className;
     m_parent = parent;
-    m_pdxInstanceFactory =
-        (PdxInstanceFactoryImpl) gci.createPdxInstanceFactory(JSONFormatter.JSON_CLASSNAME, false);
+    m_pdxInstanceFactory = (PdxInstanceFactoryImpl) cache
+        .createPdxInstanceFactory(JSONFormatter.JSON_CLASSNAME, false);
   }
 
   public JSONToPdxMapper getParent() {
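
The same swap shows up here as a tiny private accessor so the cast to InternalCache lives in exactly one place. A minimal sketch of that idiom, using only the calls visible in the hunk above (the surrounding class is hypothetical):

import org.apache.geode.cache.CacheFactory;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.pdx.JSONFormatter;
import org.apache.geode.pdx.PdxInstanceFactory;

class JsonFactorySketch {
  // Single cast; every other call site works against the InternalCache interface.
  private InternalCache getCache() {
    return (InternalCache) CacheFactory.getAnyInstance();
  }

  PdxInstanceFactory newJsonPdxFactory() {
    // JSON documents all map onto one well-known PDX class name, with
    // expectDomainClass == false since there is no domain class to deserialize to.
    return getCache().createPdxInstanceFactory(JSONFormatter.JSON_CLASSNAME, false);
  }
}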

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceSortedHelper.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceSortedHelper.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceSortedHelper.java
index 7f510da..24e596a 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceSortedHelper.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxInstanceSortedHelper.java
@@ -16,28 +16,20 @@ package org.apache.geode.pdx.internal.json;
 
 import java.math.BigDecimal;
 import java.math.BigInteger;
-import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
 import java.util.LinkedList;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.SortedMap;
-import java.util.concurrent.ConcurrentSkipListMap;
 
 import org.apache.logging.log4j.Logger;
 
 import org.apache.geode.cache.CacheFactory;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.pdx.FieldType;
 import org.apache.geode.pdx.JSONFormatter;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.PdxInstanceFactory;
-import org.apache.geode.pdx.internal.PdxInstanceFactoryImpl;
 
-/*
+/**
  * This class is intermediate class to create PdxInstance.
  */
 public class PdxInstanceSortedHelper implements JSONToPdxMapper {
@@ -48,8 +40,11 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
   PdxInstance m_pdxInstance;
   String m_PdxName;// when pdx is member, else null if part of lists
 
+  private InternalCache getCache() {
+    return (InternalCache) CacheFactory.getAnyInstance();
+  }
+
   public PdxInstanceSortedHelper(String className, JSONToPdxMapper parent) {
-    GemFireCacheImpl gci = (GemFireCacheImpl) CacheFactory.getAnyInstance();
     if (logger.isTraceEnabled()) {
       logger.trace("ClassName {}", className);
     }
@@ -57,21 +52,11 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     m_parent = parent;
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#getParent()
-   */
   @Override
   public JSONToPdxMapper getParent() {
     return m_parent;
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#setPdxFieldName(java.lang.String)
-   */
   @Override
   public void setPdxFieldName(String name) {
     if (logger.isTraceEnabled()) {
@@ -104,12 +89,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     }
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addStringField(java.lang.String,
-   * java.lang.String)
-   */
   @Override
   public void addStringField(String fieldName, String value) {
     if (logger.isTraceEnabled()) {
@@ -118,11 +97,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.STRING));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addByteField(java.lang.String, byte)
-   */
   @Override
   public void addByteField(String fieldName, byte value) {
     if (logger.isTraceEnabled()) {
@@ -131,11 +105,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.BYTE));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addShortField(java.lang.String, short)
-   */
   @Override
   public void addShortField(String fieldName, short value) {
     if (logger.isTraceEnabled()) {
@@ -144,11 +113,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.SHORT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addIntField(java.lang.String, int)
-   */
   @Override
   public void addIntField(String fieldName, int value) {
     if (logger.isTraceEnabled()) {
@@ -157,11 +121,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.INT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addLongField(java.lang.String, long)
-   */
   @Override
   public void addLongField(String fieldName, long value) {
     if (logger.isTraceEnabled()) {
@@ -170,12 +129,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.LONG));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addBigDecimalField(java.lang.String,
-   * java.math.BigDecimal)
-   */
   @Override
   public void addBigDecimalField(String fieldName, BigDecimal value) {
     if (logger.isTraceEnabled()) {
@@ -184,12 +137,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.OBJECT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addBigIntegerField(java.lang.String,
-   * java.math.BigInteger)
-   */
   @Override
   public void addBigIntegerField(String fieldName, BigInteger value) {
     if (logger.isTraceEnabled()) {
@@ -198,12 +145,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.OBJECT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addBooleanField(java.lang.String,
-   * boolean)
-   */
   @Override
   public void addBooleanField(String fieldName, boolean value) {
     if (logger.isTraceEnabled()) {
@@ -212,11 +153,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.BOOLEAN));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addFloatField(java.lang.String, float)
-   */
   @Override
   public void addFloatField(String fieldName, float value) {
     if (logger.isTraceEnabled()) {
@@ -225,12 +161,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.FLOAT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addDoubleField(java.lang.String,
-   * double)
-   */
   @Override
   public void addDoubleField(String fieldName, double value) {
     if (logger.isTraceEnabled()) {
@@ -239,11 +169,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, value, FieldType.DOUBLE));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addNullField(java.lang.String)
-   */
   @Override
   public void addNullField(String fieldName) {
     if (logger.isTraceEnabled()) {
@@ -252,12 +177,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, null, FieldType.OBJECT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addListField(java.lang.String,
-   * org.apache.geode.pdx.internal.json.PdxListHelper)
-   */
   @Override
   public void addListField(String fieldName, PdxListHelper list) {
     if (logger.isTraceEnabled()) {
@@ -267,11 +186,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, list.getList(), FieldType.OBJECT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#endListField(java.lang.String)
-   */
   @Override
   public void endListField(String fieldName) {
     if (logger.isTraceEnabled()) {
@@ -279,12 +193,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     }
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#addObjectField(java.lang.String,
-   * org.apache.geode.pdx.PdxInstance)
-   */
   @Override
   public void addObjectField(String fieldName, Object member) {
     if (logger.isTraceEnabled()) {
@@ -296,11 +204,6 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     fieldList.add(new JSONFieldHolder(fieldName, member, FieldType.OBJECT));
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#endObjectField(java.lang.String)
-   */
   @Override
   public void endObjectField(String fieldName) {
     if (logger.isTraceEnabled()) {
@@ -352,30 +255,18 @@ public class PdxInstanceSortedHelper implements JSONToPdxMapper {
     }
   }
 
-
-  static PdxInstanceFactory createPdxInstanceFactory() {
-    GemFireCacheImpl gci = (GemFireCacheImpl) CacheFactory.getAnyInstance();
-    return gci.createPdxInstanceFactory(JSONFormatter.JSON_CLASSNAME, false);
+  private PdxInstanceFactory createPdxInstanceFactory() {
+    InternalCache cache = getCache();
+    return cache.createPdxInstanceFactory(JSONFormatter.JSON_CLASSNAME, false);
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#getPdxInstance()
-   */
   @Override
   public PdxInstance getPdxInstance() {
     return m_pdxInstance;
   }
 
-  /*
-   * (non-Javadoc)
-   * 
-   * @see org.apache.geode.pdx.internal.json.JSONToPdxMapper#getPdxFieldName()
-   */
   @Override
   public String getPdxFieldName() {
-    // return m_fieldName != null ? m_fieldName : "emptyclassname"; //when object is just like { }
     return m_PdxName;
   }
 }
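
Behind the add*Field overrides above, the sorted helper is essentially queueing (name, value, type) triples and deferring the real writes to a PdxInstanceFactory. A hedged sketch of that shape is below; FieldHolder stands in for JSONFieldHolder, and only a couple of representative writer calls are shown.

import java.util.LinkedList;
import java.util.List;

import org.apache.geode.pdx.FieldType;
import org.apache.geode.pdx.PdxInstance;
import org.apache.geode.pdx.PdxInstanceFactory;

class SortedFieldsSketch {
  // Hypothetical stand-in for JSONFieldHolder: a (name, value, type) triple.
  static class FieldHolder {
    final String name;
    final Object value;
    final FieldType type;

    FieldHolder(String name, Object value, FieldType type) {
      this.name = name;
      this.value = value;
      this.type = type;
    }
  }

  private final List<FieldHolder> fieldList = new LinkedList<>();

  void addStringField(String fieldName, String value) {
    fieldList.add(new FieldHolder(fieldName, value, FieldType.STRING));
  }

  void addIntField(String fieldName, int value) {
    fieldList.add(new FieldHolder(fieldName, value, FieldType.INT));
  }

  // Later, drain the queue into a factory and create the immutable PdxInstance.
  PdxInstance build(PdxInstanceFactory factory) {
    for (FieldHolder f : fieldList) {
      switch (f.type) {
        case STRING:
          factory.writeString(f.name, (String) f.value);
          break;
        case INT:
          factory.writeInt(f.name, (Integer) f.value);
          break;
        default:
          factory.writeObject(f.name, f.value);
      }
    }
    return factory.create();
  }
}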

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxListHelper.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxListHelper.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxListHelper.java
index 417e56a..f73a129 100755
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxListHelper.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/json/PdxListHelper.java
@@ -22,8 +22,7 @@ import java.util.List;
 import org.apache.logging.log4j.Logger;
 
 import org.apache.geode.cache.CacheFactory;
-import org.apache.geode.i18n.LogWriterI18n;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.logging.LogService;
 
 /*
@@ -36,8 +35,12 @@ public class PdxListHelper {
   PdxListHelper m_parent;
   List list = new LinkedList();
 
+  private InternalCache getCache() {
+    return (InternalCache) CacheFactory.getAnyInstance();
+  }
+
   public PdxListHelper(PdxListHelper parent, String name) {
-    GemFireCacheImpl gci = (GemFireCacheImpl) CacheFactory.getAnyInstance();
+    InternalCache cache = getCache();
     m_name = name;
     if (logger.isTraceEnabled()) {
       logger.trace("PdxListHelper name: {}", name);
@@ -155,7 +158,6 @@ public class PdxListHelper {
     if (logger.isTraceEnabled()) {
       logger.trace("addObjectField fieldName: {}", fieldName);
     }
-    // dpi.setPdxFieldName(fieldName);
     list.add(dpi.getPdxInstance());
   }
 

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/redis/GeodeRedisServer.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/redis/GeodeRedisServer.java b/geode-core/src/main/java/org/apache/geode/redis/GeodeRedisServer.java
index 32a40dd..9ffc075 100644
--- a/geode-core/src/main/java/org/apache/geode/redis/GeodeRedisServer.java
+++ b/geode-core/src/main/java/org/apache/geode/redis/GeodeRedisServer.java
@@ -45,24 +45,33 @@ import io.netty.channel.socket.SocketChannel;
 import io.netty.channel.socket.nio.NioServerSocketChannel;
 import io.netty.channel.socket.oio.OioServerSocketChannel;
 import io.netty.util.concurrent.Future;
-import org.apache.geode.annotations.Experimental;
-import org.apache.geode.cache.*;
-import org.apache.geode.redis.internal.ByteArrayWrapper;
-import org.apache.geode.redis.internal.ByteToCommandDecoder;
-import org.apache.geode.redis.internal.Coder;
-import org.apache.geode.redis.internal.ExecutionHandlerContext;
-import org.apache.geode.redis.internal.RedisDataType;
-import org.apache.geode.redis.internal.RegionProvider;
 
 import org.apache.geode.InternalGemFireError;
 import org.apache.geode.LogWriter;
+import org.apache.geode.annotations.Experimental;
+import org.apache.geode.cache.AttributesFactory;
+import org.apache.geode.cache.Cache;
+import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.DataPolicy;
+import org.apache.geode.cache.EntryEvent;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.RegionDestroyedException;
+import org.apache.geode.cache.RegionFactory;
+import org.apache.geode.cache.RegionShortcut;
 import org.apache.geode.cache.util.CacheListenerAdapter;
 import org.apache.geode.distributed.internal.InternalDistributedSystem;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.InternalRegionArguments;
 import org.apache.geode.internal.hll.HyperLogLogPlus;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.net.SocketCreator;
+import org.apache.geode.redis.internal.ByteArrayWrapper;
+import org.apache.geode.redis.internal.ByteToCommandDecoder;
+import org.apache.geode.redis.internal.Coder;
+import org.apache.geode.redis.internal.ExecutionHandlerContext;
+import org.apache.geode.redis.internal.RedisDataType;
+import org.apache.geode.redis.internal.RegionProvider;
 
 /**
  * The GeodeRedisServer is a server that understands the Redis protocol. As commands are sent to the
@@ -142,7 +151,7 @@ public class GeodeRedisServer {
   private static Thread mainThread = null;
 
   /**
-   * The default Redis port as specified by their protocol, {@value #DEFAULT_REDIS_SERVER_PORT}
+   * The default Redis port as specified by their protocol, {@code DEFAULT_REDIS_SERVER_PORT}
    */
   public static final int DEFAULT_REDIS_SERVER_PORT = 6379;
 
@@ -213,25 +222,25 @@ public class GeodeRedisServer {
 
   /**
    * The field that defines the name of the {@link Region} which holds all of the strings. The
-   * current value of this field is {@value #STRING_REGION}.
+   * current value of this field is {@code STRING_REGION}.
    */
   public static final String STRING_REGION = "ReDiS_StRiNgS";
 
   /**
    * The field that defines the name of the {@link Region} which holds all of the HyperLogLogs. The
-   * current value of this field is {@value #HLL_REGION}.
+   * current value of this field is {@code HLL_REGION}.
    */
   public static final String HLL_REGION = "ReDiS_HlL";
 
   /**
    * The field that defines the name of the {@link Region} which holds all of the Redis meta data.
-   * The current value of this field is {@value #REDIS_META_DATA_REGION}.
+   * The current value of this field is {@code REDIS_META_DATA_REGION}.
    */
   public static final String REDIS_META_DATA_REGION = "__ReDiS_MeTa_DaTa";
 
   /**
    * The system property name used to set the default {@link Region} creation type. The property
-   * name is {@value #DEFAULT_REGION_SYS_PROP_NAME} and the acceptable values are types defined by
+   * name is {@code DEFAULT_REGION_SYS_PROP_NAME} and the acceptable values are types defined by
    * {@link RegionShortcut}, i.e. "PARTITION" would be used for {@link RegionShortcut#PARTITION}.
    */
   public static final String DEFAULT_REGION_SYS_PROP_NAME = "gemfireredis.regiontype";
@@ -290,7 +299,7 @@ public class GeodeRedisServer {
   }
 
   /**
-   * Constructor for {@link GeodeRedisServer} that will start the server on the given port and bind
+   * Constructor for {@code GeodeRedisServer} that will start the server on the given port and bind
    * to the first non-loopback address
    * 
    * @param port The port the server will bind to, will use {@value #DEFAULT_REDIS_SERVER_PORT} by
@@ -301,7 +310,7 @@ public class GeodeRedisServer {
   }
 
   /**
-   * Constructor for {@link GeodeRedisServer} that will start the server and bind to the given
+   * Constructor for {@code GeodeRedisServer} that will start the server and bind to the given
    * address and port
    * 
    * @param bindAddress The address to which the server will attempt to bind to
@@ -312,9 +321,8 @@ public class GeodeRedisServer {
     this(bindAddress, port, null);
   }
 
-
   /**
-   * Constructor for {@link GeodeRedisServer} that will start the server and bind to the given
+   * Constructor for {@code GeodeRedisServer} that will start the server and bind to the given
    * address and port. Keep in mind that the log level configuration will only be set if a
    * {@link Cache} does not already exist, if one already exists then setting that property will
    * have no effect.
@@ -367,7 +375,7 @@ public class GeodeRedisServer {
   }
 
   /**
-   * This is function to call on a {@link GeodeRedisServer} instance to start it running
+   * This is function to call on a {@code GeodeRedisServer} instance to start it running
    */
   public synchronized void start() {
     if (!started) {
@@ -386,24 +394,24 @@ public class GeodeRedisServer {
 
   /**
    * Initializes the {@link Cache}, and creates Redis necessities Region and protects declares that
-   * {@link Region} to be protected. Also, every {@link GeodeRedisServer} will check for entries
+   * {@link Region} to be protected. Also, every {@code GeodeRedisServer} will check for entries
    * already in the meta data Region.
    */
   private void startGemFire() {
-    Cache c = GemFireCacheImpl.getInstance();
-    if (c == null) {
+    Cache cache = GemFireCacheImpl.getInstance();
+    if (cache == null) {
       synchronized (GeodeRedisServer.class) {
-        c = GemFireCacheImpl.getInstance();
-        if (c == null) {
+        cache = GemFireCacheImpl.getInstance();
+        if (cache == null) {
           CacheFactory cacheFactory = new CacheFactory();
           if (logLevel != null)
             cacheFactory.set(LOG_LEVEL, logLevel);
-          c = cacheFactory.create();
+          cache = cacheFactory.create();
         }
       }
     }
-    this.cache = c;
-    this.logger = c.getLogger();
+    this.cache = cache;
+    this.logger = cache.getLogger();
   }
 
   private void initializeRedis() {
@@ -412,7 +420,7 @@ public class GeodeRedisServer {
 
       Region<ByteArrayWrapper, HyperLogLogPlus> hLLRegion;
       Region<String, RedisDataType> redisMetaData;
-      GemFireCacheImpl gemFireCache = (GemFireCacheImpl) cache;
+      InternalCache gemFireCache = (InternalCache) cache;
       try {
         if ((stringsRegion = cache.getRegion(STRING_REGION)) == null) {
           RegionFactory<ByteArrayWrapper, ByteArrayWrapper> regionFactory =
@@ -611,7 +619,7 @@ public class GeodeRedisServer {
   }
 
   /**
-   * Shutdown method for {@link GeodeRedisServer}. This closes the {@link Cache}, interrupts all
+   * Shutdown method for {@code GeodeRedisServer}. This closes the {@link Cache}, interrupts all
    * execution and forcefully closes all connections.
    */
   public synchronized void shutdown() {
@@ -637,7 +645,7 @@ public class GeodeRedisServer {
   }
 
   /**
-   * Static main method that allows the {@link GeodeRedisServer} to be started from the command
+   * Static main method that allows the {@code GeodeRedisServer} to be started from the command
    * line. The supported command line arguments are
    * <p>
    * -port= <br>
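
The startGemFire() rewrite above keeps the original double-checked bootstrap and only renames the local variable. Pulled out as a compact sketch (the log-level plumbing is simplified, and the property key mirrors the LOG_LEVEL constant used in the diff):

import org.apache.geode.cache.Cache;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.internal.cache.GemFireCacheImpl;

class RedisBootstrapSketch {
  private volatile Cache cache;
  private final String logLevel;  // may be null; only applied when creating a new Cache

  RedisBootstrapSketch(String logLevel) {
    this.logLevel = logLevel;
  }

  void startGemFire() {
    // Reuse an existing Cache if one is already open; otherwise create one,
    // guarding the create with a class-level lock so only one thread wins.
    Cache existing = GemFireCacheImpl.getInstance();
    if (existing == null) {
      synchronized (RedisBootstrapSketch.class) {
        existing = GemFireCacheImpl.getInstance();
        if (existing == null) {
          CacheFactory cacheFactory = new CacheFactory();
          if (logLevel != null) {
            cacheFactory.set("log-level", logLevel);  // the diff uses the LOG_LEVEL constant
          }
          existing = cacheFactory.create();
        }
      }
    }
    this.cache = existing;
  }
}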

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/redis/internal/RegionProvider.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/redis/internal/RegionProvider.java b/geode-core/src/main/java/org/apache/geode/redis/internal/RegionProvider.java
index 5994d7d..1de73a0 100644
--- a/geode-core/src/main/java/org/apache/geode/redis/internal/RegionProvider.java
+++ b/geode-core/src/main/java/org/apache/geode/redis/internal/RegionProvider.java
@@ -54,8 +54,6 @@ import org.apache.geode.redis.GeodeRedisServer;
  * because some keys for Redis represented as a {@link Region} in {@link GeodeRedisServer} come with
  * additional state. Therefore getting, creating, or destroying a {@link Region} needs to be
  * synchronized, which is done away with and abstracted by this class.
- * 
- *
  */
 public class RegionProvider implements Closeable {
 

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/redis/internal/executor/list/ListExecutor.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/redis/internal/executor/list/ListExecutor.java b/geode-core/src/main/java/org/apache/geode/redis/internal/executor/list/ListExecutor.java
index ab80a36..fd518b9 100755
--- a/geode-core/src/main/java/org/apache/geode/redis/internal/executor/list/ListExecutor.java
+++ b/geode-core/src/main/java/org/apache/geode/redis/internal/executor/list/ListExecutor.java
@@ -24,12 +24,11 @@ import org.apache.geode.redis.internal.ExecutionHandlerContext;
 import org.apache.geode.redis.internal.RedisDataType;
 import org.apache.geode.redis.internal.executor.AbstractExecutor;
 
-
 public abstract class ListExecutor extends AbstractExecutor {
 
   protected static final int LIST_EMPTY_SIZE = 2;
 
-  protected static enum ListDirection {
+  protected enum ListDirection {
     LEFT, RIGHT
   };
 
@@ -76,7 +75,7 @@ public abstract class ListExecutor extends AbstractExecutor {
       index += pushType == ListDirection.LEFT ? -1 : 1; // Subtract index if left push, add if right
                                                         // push
 
-    /**
+    /*
      * Multi push command
      * 
      * For every element that needs to be added
@@ -86,7 +85,7 @@ public abstract class ListExecutor extends AbstractExecutor {
       byte[] value = commandElems.get(i);
       ByteArrayWrapper wrapper = new ByteArrayWrapper(value);
 
-      /**
+      /*
        * 
        * First, use the start index to attempt to insert the value into the Region
        * 
@@ -101,7 +100,7 @@ public abstract class ListExecutor extends AbstractExecutor {
         }
       } while (oldValue != null);
 
-      /**
+      /*
        * 
        * Next, update the index in the meta data region. Keep trying to replace the existing index
        * unless the index is further out than previously inserted, that's ok. Example below:
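
The comment-style fixes above sit inside the multi-push loop, whose core is an optimistic insert-and-retry walk over list indexes. A simplified, hedged sketch of that retry pattern against a plain ConcurrentMap (the real code works on a Region plus a separate meta-data index, which is omitted here):

import java.util.List;
import java.util.concurrent.ConcurrentMap;

class MultiPushSketch {
  // For each value, keep probing candidate indexes until putIfAbsent wins a free
  // slot; LEFT pushes walk the index downward, RIGHT pushes walk it upward.
  static void push(ConcurrentMap<Integer, byte[]> list, int startIndex, boolean leftPush,
      List<byte[]> values) {
    int index = startIndex;
    for (byte[] value : values) {
      byte[] oldValue;
      do {
        oldValue = list.putIfAbsent(index, value);
        index += leftPush ? -1 : 1;  // move to the next candidate slot either way
      } while (oldValue != null);
    }
  }
}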

http://git-wip-us.apache.org/repos/asf/geode/blob/654d65b5/geode-core/src/main/java/org/apache/geode/security/AuthInitialize.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/security/AuthInitialize.java b/geode-core/src/main/java/org/apache/geode/security/AuthInitialize.java
index 05ba515..6b0675a 100644
--- a/geode-core/src/main/java/org/apache/geode/security/AuthInitialize.java
+++ b/geode-core/src/main/java/org/apache/geode/security/AuthInitialize.java
@@ -22,6 +22,7 @@ import org.apache.geode.cache.CacheCallback;
 import org.apache.geode.distributed.DistributedMember;
 import org.apache.geode.distributed.DistributedSystem;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 
 // TODO Add example usage of this interface and configuration details
 /**
@@ -30,7 +31,7 @@ import org.apache.geode.internal.cache.GemFireCacheImpl;
  * server/locator side respectively. Implementations should register name of the static creation
  * function (that returns an object of the class) as the <i>security-peer-auth-init</i> system
  * property on peers and as the <i>security-client-auth-init</i> system property on clients.
- * 
+ *
  * @since GemFire 5.5
  */
 public interface AuthInitialize extends CacheCallback {
@@ -46,6 +47,7 @@ public interface AuthInitialize extends CacheCallback {
    *
    * @deprecated since Geode 1.0, use init()
    */
+  @Deprecated
   public void init(LogWriter systemLogger, LogWriter securityLogger)
       throws AuthenticationFailedException;
 
@@ -53,8 +55,8 @@ public interface AuthInitialize extends CacheCallback {
    * @since Geode 1.0. implement this method instead of init with logwriters. Implementation should
    *        use log4j instead of these loggers.
    */
-  default public void init() {
-    GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
+  public default void init() {
+    InternalCache cache = GemFireCacheImpl.getInstance();
     init(cache.getLogger(), cache.getSecurityLogger());
   }
 
@@ -83,6 +85,7 @@ public interface AuthInitialize extends CacheCallback {
    * @deprecated since Geode 1.0, use getCredentials(Properties). When using Integrated security,
    *             all members, peer/client will use the same credentials.
    */
+  @Deprecated
   public Properties getCredentials(Properties securityProps, DistributedMember server,
       boolean isPeer) throws AuthenticationFailedException;
 
@@ -93,7 +96,7 @@ public interface AuthInitialize extends CacheCallback {
    * @return the credentials to be used. It needs to contain "security-username" and
    *         "security-password"
    */
-  default public Properties getCredentials(Properties securityProps) {
+  default Properties getCredentials(Properties securityProps) {
     return getCredentials(securityProps, null, true);
   }
 }
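
With the two legacy methods now explicitly @Deprecated and the Properties-based pair carrying defaults, a new AuthInitialize implementation mostly lives in getCredentials(Properties). A minimal, hedged sketch of such an implementation (the class name and key handling are illustrative; the property keys follow the javadoc in the hunk above):

import java.util.Properties;

import org.apache.geode.LogWriter;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.security.AuthInitialize;
import org.apache.geode.security.AuthenticationFailedException;

public class BasicAuthInitialize implements AuthInitialize {

  // Static creation function, registered via security-client-auth-init /
  // security-peer-auth-init as described in the interface javadoc.
  public static AuthInitialize create() {
    return new BasicAuthInitialize();
  }

  @Override
  public void init() {
    // Prefer the no-arg default over the deprecated LogWriter variant; nothing to set up here.
  }

  @Override
  @Deprecated
  public void init(LogWriter systemLogger, LogWriter securityLogger)
      throws AuthenticationFailedException {
    // Deprecated signature kept only to satisfy the interface.
  }

  @Override
  public Properties getCredentials(Properties securityProps) {
    // Pass through the keys the javadoc above names for integrated security.
    Properties credentials = new Properties();
    credentials.setProperty("security-username",
        securityProps.getProperty("security-username", ""));
    credentials.setProperty("security-password",
        securityProps.getProperty("security-password", ""));
    return credentials;
  }

  @Override
  @Deprecated
  public Properties getCredentials(Properties securityProps, DistributedMember server,
      boolean isPeer) throws AuthenticationFailedException {
    return getCredentials(securityProps);
  }

  @Override
  public void close() {
    // CacheCallback.close(): nothing to release in this sketch.
  }
}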