Posted to commits@geode.apache.org by kl...@apache.org on 2017/05/04 18:37:18 UTC

[03/54] [abbrv] geode git commit: GEODE-2632: change dependencies on GemFireCacheImpl to InternalCache
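
The commit's recurring pattern is to narrow internal dependencies from the concrete GemFireCacheImpl class to the InternalCache interface, casting back to an implementation class only at the point where an interface method is not available (see the DiskStore/DiskStoreImpl handling in the MemberMBeanBridge and PeerTypeRegistration hunks below). The following is a minimal sketch of that pattern, not part of the commit itself: the ExampleBridge class is invented for illustration, while the InternalCache and DiskStoreImpl calls are the ones the hunks below rely on.

    import java.util.Collection;

    import org.apache.geode.cache.DiskStore;
    import org.apache.geode.internal.cache.DiskStoreImpl;
    import org.apache.geode.internal.cache.InternalCache;

    // Hypothetical illustration only: hold the InternalCache interface
    // (previously a GemFireCacheImpl field) and cast to the implementation
    // class only where the interface does not expose the needed method.
    public class ExampleBridge {

      private final InternalCache cache; // was: GemFireCacheImpl

      public ExampleBridge(InternalCache cache) {
        this.cache = cache;
      }

      public boolean hasPersistedData() {
        // listDiskStoresIncludingRegionOwned() returns the DiskStore interface;
        // hasPersistedData() lives on DiskStoreImpl, so cast at the point of use.
        Collection<DiskStore> stores = this.cache.listDiskStoresIncludingRegionOwned();
        for (DiskStore store : stores) {
          if (((DiskStoreImpl) store).hasPersistedData()) {
            return true;
          }
        }
        return false;
      }
    }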

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/management/internal/beans/MemberMBeanBridge.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/beans/MemberMBeanBridge.java b/geode-core/src/main/java/org/apache/geode/management/internal/beans/MemberMBeanBridge.java
index 2b847d0..f6450f7 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/beans/MemberMBeanBridge.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/beans/MemberMBeanBridge.java
@@ -14,6 +14,8 @@
  */
 package org.apache.geode.management.internal.beans;
 
+import static org.apache.geode.internal.lang.SystemUtils.getLineSeparator;
+
 import org.apache.geode.Statistics;
 import org.apache.geode.StatisticsType;
 import org.apache.geode.cache.CacheClosedException;
@@ -319,16 +321,9 @@ public class MemberMBeanBridge {
       logger.info(LogMarker.CONFIG, "Command Service could not be initialized. {}", e.getMessage());
     } catch (DependenciesNotFoundException e) {
       commandServiceInitError = e.getMessage();
-      if (CacheServerLauncher.isDedicatedCacheServer) {
-        // log as error for dedicated cache server - launched through script
-        // LOG:CONFIG:
-        logger.info(LogMarker.CONFIG, "Command Service could not be initialized. {}",
-            e.getMessage());
-      } else {
-        // LOG:CONFIG:
-        logger.info(LogMarker.CONFIG, "Command Service could not be initialized. {}",
-            e.getMessage());
-      }
+      // log as error for dedicated cache server - launched through script
+      // LOG:CONFIG:
+      logger.info(LogMarker.CONFIG, "Command Service could not be initialized. {}", e.getMessage());
     }
 
     intitGemfireProperties();
@@ -336,7 +331,7 @@ public class MemberMBeanBridge {
     try {
       InetAddress addr = SocketCreator.getLocalHost();
       this.hostname = addr.getHostName();
-    } catch (UnknownHostException ex) {
+    } catch (UnknownHostException ignore) {
       this.hostname = ManagementConstants.DEFAULT_HOST_NAME;
     }
 
@@ -787,13 +782,13 @@ public class MemberMBeanBridge {
       try {
         maxFileDescriptorCount =
             (Long) mbeanServer.getAttribute(osObjectName, "MaxFileDescriptorCount");
-      } catch (Exception e) {
+      } catch (Exception ignore) {
         maxFileDescriptorCount = -1;
       }
       try {
         committedVirtualMemorySize =
             (Long) mbeanServer.getAttribute(osObjectName, "CommittedVirtualMemorySize");
-      } catch (Exception e) {
+      } catch (Exception ignore) {
         committedVirtualMemorySize = -1;
       }
 
@@ -804,23 +799,23 @@ public class MemberMBeanBridge {
         try {
           totalPhysicalMemorySize =
               systemStat.get(StatsKey.LINUX_SYSTEM_PHYSICAL_MEMORY).longValue();
-        } catch (Exception e) {
+        } catch (Exception ignore) {
           totalPhysicalMemorySize = -1;
         }
         try {
           freePhysicalMemorySize = systemStat.get(StatsKey.LINUX_SYSTEM_FREE_MEMORY).longValue();
-        } catch (Exception e) {
+        } catch (Exception ignore) {
           freePhysicalMemorySize = -1;
         }
         try {
           totalSwapSpaceSize = systemStat.get(StatsKey.LINUX_SYSTEM_TOTAL_SWAP_SIZE).longValue();
-        } catch (Exception e) {
+        } catch (Exception ignore) {
           totalSwapSpaceSize = -1;
         }
 
         try {
           freeSwapSpaceSize = systemStat.get(StatsKey.LINUX_SYSTEM_FREE_SWAP_SIZE).longValue();
-        } catch (Exception e) {
+        } catch (Exception ignore) {
           freeSwapSpaceSize = -1;
         }
 
@@ -877,9 +872,9 @@ public class MemberMBeanBridge {
     List<String> compactedStores = new ArrayList<String>();
 
     if (cache != null && !cache.isClosed()) {
-      for (DiskStoreImpl store : cacheImpl.listDiskStoresIncludingRegionOwned()) {
+      for (DiskStore store : cacheImpl.listDiskStoresIncludingRegionOwned()) {
         if (store.forceCompaction()) {
-          compactedStores.add(store.getPersistentID().getDirectory());
+          compactedStores.add(((DiskStoreImpl) store).getPersistentID().getDirectory());
 
         }
       }
@@ -897,7 +892,7 @@ public class MemberMBeanBridge {
   public String[] listDiskStores(boolean includeRegionOwned) {
     GemFireCacheImpl cacheImpl = (GemFireCacheImpl) cache;
     String[] retStr = null;
-    Collection<DiskStoreImpl> diskCollection = null;
+    Collection<DiskStore> diskCollection = null;
     if (includeRegionOwned) {
       diskCollection = cacheImpl.listDiskStoresIncludingRegionOwned();
     } else {
@@ -905,7 +900,7 @@ public class MemberMBeanBridge {
     }
     if (diskCollection != null && diskCollection.size() > 0) {
       retStr = new String[diskCollection.size()];
-      Iterator<DiskStoreImpl> it = diskCollection.iterator();
+      Iterator<DiskStore> it = diskCollection.iterator();
       int i = 0;
       while (it.hasNext()) {
         retStr[i] = it.next().getName();
@@ -965,13 +960,14 @@ public class MemberMBeanBridge {
       return LocalizedStrings.SystemMemberImpl_NO_LOG_FILE_CONFIGURED_LOG_MESSAGES_WILL_BE_DIRECTED_TO_STDOUT
           .toLocalizedString();
     } else {
-      StringBuffer result = new StringBuffer();
+      StringBuilder result = new StringBuilder();
       if (mainTail != null) {
         result.append(mainTail);
       }
       if (childTail != null) {
-        result.append(
-            "\n" + LocalizedStrings.SystemMemberImpl_TAIL_OF_CHILD_LOG.toLocalizedString() + "\n");
+        result.append(getLineSeparator())
+            .append(LocalizedStrings.SystemMemberImpl_TAIL_OF_CHILD_LOG.toLocalizedString())
+            .append(getLineSeparator());
         result.append(childTail);
       }
       return result.toString();
@@ -993,7 +989,7 @@ public class MemberMBeanBridge {
           try {
             // Allow the Function call to exit
             Thread.sleep(1000);
-          } catch (InterruptedException e) {
+          } catch (InterruptedException ignore) {
           }
           ConnectionTable.threadWantsSharedResources();
           if (ids.isConnected()) {
@@ -1021,8 +1017,8 @@ public class MemberMBeanBridge {
     GemFireCacheImpl cache = GemFireCacheImpl.getInstance();
 
     if (cache != null) {
-      Collection<DiskStoreImpl> diskStores = cache.listDiskStoresIncludingRegionOwned();
-      for (DiskStoreImpl store : diskStores) {
+      Collection<DiskStore> diskStores = cache.listDiskStoresIncludingRegionOwned();
+      for (DiskStore store : diskStores) {
         store.flush();
       }
     }
@@ -1201,8 +1197,7 @@ public class MemberMBeanBridge {
    */
   public long getTotalBytesInUse() {
     MemoryUsage memHeap = memoryMXBean.getHeapMemoryUsage();
-    long bytesUsed = memHeap.getUsed();
-    return bytesUsed;
+    return memHeap.getUsed();
   }
 
   /**
@@ -1334,9 +1329,8 @@ public class MemberMBeanBridge {
       return false;
     }
     try {
-      boolean isManager = service.isManager();
-      return isManager;
-    } catch (Exception e) {
+      return service.isManager();
+    } catch (Exception ignore) {
       return false;
     }
   }
@@ -1354,22 +1348,18 @@ public class MemberMBeanBridge {
     }
     try {
       return service.isManagerCreated();
-    } catch (Exception e) {
+    } catch (Exception ignore) {
       return false;
     }
   }
 
   /**
-   * 
    * @return true if member has a server
    */
   public boolean isServer() {
     return cache.isServer();
   }
 
-  /** Statistics Related Attributes **/
-  /*********************************************************************************************************/
-
   public int getInitialImageKeysReceived() {
     return getMemberLevelStatistic(StatsKey.GET_INITIAL_IMAGE_KEYS_RECEIVED).intValue();
   }
@@ -1701,7 +1691,7 @@ public class MemberMBeanBridge {
     try {
       maxFileDescriptorCount =
           (Long) mbeanServer.getAttribute(osObjectName, "MaxFileDescriptorCount");
-    } catch (Exception e) {
+    } catch (Exception ignore) {
       maxFileDescriptorCount = -1;
     }
     return maxFileDescriptorCount;
@@ -1729,11 +1719,17 @@ public class MemberMBeanBridge {
     return objects;
   }
 
+  /**
+   * @deprecated Please use {@link #getOffHeapFreeMemory()} instead.
+   */
   @Deprecated
   public long getOffHeapFreeSize() {
     return getOffHeapFreeMemory();
   }
 
+  /**
+   * @deprecated Please use {@link #getOffHeapUsedMemory()} instead.
+   */
   @Deprecated
   public long getOffHeapUsedSize() {
     return getOffHeapUsedMemory();

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/management/internal/configuration/handlers/ConfigurationRequestHandler.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/handlers/ConfigurationRequestHandler.java b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/handlers/ConfigurationRequestHandler.java
index 6495d09..201bcd6 100644
--- a/geode-core/src/main/java/org/apache/geode/management/internal/configuration/handlers/ConfigurationRequestHandler.java
+++ b/geode-core/src/main/java/org/apache/geode/management/internal/configuration/handlers/ConfigurationRequestHandler.java
@@ -48,7 +48,7 @@ public class ConfigurationRequestHandler implements TcpHandler {
     try {
       logger.info("Received request for configuration  : {}", request);
       ConfigurationRequest configRequest = (ConfigurationRequest) request;
-      return sharedConfig.createConfigurationReponse(configRequest);
+      return sharedConfig.createConfigurationResponse(configRequest);
     } catch (Exception e) {
       logger.info(e.getMessage(), e);
       return null;

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java b/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
index d458a22..2214456 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/PdxInstanceFactory.java
@@ -16,7 +16,6 @@ package org.apache.geode.pdx;
 
 import java.util.Date;
 
-import org.apache.geode.cache.CacheFactory;
 import org.apache.geode.cache.RegionService;
 
 /**
@@ -41,7 +40,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>char</code>.
+   * {@code char}.
    * <p>
    * Java char is mapped to .NET System.Char.
    * 
@@ -55,7 +54,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>boolean</code>.
+   * {@code boolean}.
    * <p>
    * Java boolean is mapped to .NET System.Boolean.
    * 
@@ -69,7 +68,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>byte</code>.
+   * {@code byte}.
    * <p>
    * Java byte is mapped to .NET System.SByte.
    * 
@@ -83,7 +82,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>short</code>.
+   * {@code short}.
    * <p>
    * Java short is mapped to .NET System.Int16.
    * 
@@ -97,7 +96,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>int</code>.
+   * {@code int}.
    * <p>
    * Java int is mapped to .NET System.Int32.
    * 
@@ -111,7 +110,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>long</code>.
+   * {@code long}.
    * <p>
    * Java long is mapped to .NET System.Int64.
    * 
@@ -125,7 +124,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>float</code>.
+   * {@code float}.
    * <p>
    * Java float is mapped to .NET System.Float.
    * 
@@ -139,7 +138,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>double</code>.
+   * {@code double}.
    * <p>
    * Java double is mapped to .NET System.Double.
    * 
@@ -153,7 +152,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>Date</code>.
+   * {@code Date}.
    * <p>
    * Java Date is mapped to .NET System.DateTime.
    * 
@@ -167,7 +166,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>String</code>.
+   * {@code String}.
    * <p>
    * Java String is mapped to .NET System.String.
    * 
@@ -181,7 +180,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>Object</code>.
+   * {@code Object}.
    * <p>
    * It is best to use one of the other writeXXX methods if your field type will always be XXX. This
    * method allows the field value to be anything that is an instance of Object. This gives you more
@@ -201,16 +200,16 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>Object</code>.
+   * {@code Object}.
    * <p>
    * It is best to use one of the other writeXXX methods if your field type will always be XXX. This
    * method allows the field value to be anything that is an instance of Object. This gives you more
    * flexibility but more space is used to store the serialized field.
    * <p>
    * Note that some Java objects serialized with this method may not be compatible with non-java
-   * languages. To ensure that only portable objects are serialized set the
-   * <code>checkPortability</code> parameter to true. The following is a list of the Java classes
-   * that are portable and the .NET class they are mapped to:
+   * languages. To ensure that only portable objects are serialized set the {@code checkPortability}
+   * parameter to true. The following is a list of the Java classes that are portable and the .NET
+   * class they are mapped to:
    * <ul>
    * <li>instances of {@link PdxSerializable}: .NET class of same name
    * <li>instances of {@link PdxInstance}: .NET class of same name
@@ -235,10 +234,10 @@ public interface PdxInstanceFactory {
    * <li>double[]: System.Double[]
    * <li>String[]: System.String[]
    * <li>byte[][]: System.Byte[][]
-   * <li>Object[]: System.Collections.Generic.List<Object>
-   * <li>java.util.HashMap: System.Collections.Generics.IDictionary<Object, Object>
+   * <li>Object[]: System.Collections.Generic.List&lt;Object&gt;
+   * <li>java.util.HashMap: System.Collections.Generics.IDictionary&lt;Object, Object&gt;
    * <li>java.util.Hashtable: System.Collections.Hashtable
-   * <li>java.util.ArrayList: System.Collections.Generic.IList<Object>
+   * <li>java.util.ArrayList: System.Collections.Generic.IList&lt;Object&gt;
    * <li>java.util.Vector: System.Collections.ArrayList
    * <li>java.util.HashSet: CacheableHashSet
    * <li>java.util.LinkedHashSet: CacheableLinkedHashSet
@@ -259,7 +258,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>boolean[]</code>.
+   * {@code boolean[]}.
    * <p>
    * Java boolean[] is mapped to .NET System.Boolean[].
    * 
@@ -273,7 +272,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>char[]</code>.
+   * {@code char[]}.
    * <p>
    * Java char[] is mapped to .NET System.Char[].
    * 
@@ -287,7 +286,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>byte[]</code>.
+   * {@code byte[]}.
    * <p>
    * Java byte[] is mapped to .NET System.Byte[].
    * 
@@ -301,7 +300,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>short[]</code>.
+   * {@code short[]}.
    * <p>
    * Java short[] is mapped to .NET System.Int16[].
    * 
@@ -315,7 +314,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>int[]</code>.
+   * {@code int[]}.
    * <p>
    * Java int[] is mapped to .NET System.Int32[].
    * 
@@ -329,7 +328,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>long[]</code>.
+   * {@code long[]}.
    * <p>
    * Java long[] is mapped to .NET System.Int64[].
    * 
@@ -343,7 +342,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>float[]</code>.
+   * {@code float[]}.
    * <p>
    * Java float[] is mapped to .NET System.Float[].
    * 
@@ -357,7 +356,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>double[]</code>.
+   * {@code double[]}.
    * <p>
    * Java double[] is mapped to .NET System.Double[].
    * 
@@ -371,7 +370,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>String[]</code>.
+   * {@code String[]}.
    * <p>
    * Java String[] is mapped to .NET System.String[].
    * 
@@ -385,12 +384,12 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>Object[]</code>.
+   * {@code Object[]}.
    * <p>
-   * Java Object[] is mapped to .NET System.Collections.Generic.List<Object>. For how each element
-   * of the array is a mapped to .NET see {@link #writeObject(String, Object, boolean) writeObject}.
-   * Note that this call may serialize elements that are not compatible with non-java languages. To
-   * ensure that only portable objects are serialized use
+   * Java Object[] is mapped to .NET System.Collections.Generic.List&lt;Object&gt;. For how each
+   * element of the array is a mapped to .NET see {@link #writeObject(String, Object, boolean)
+   * writeObject}. Note that this call may serialize elements that are not compatible with non-java
+   * languages. To ensure that only portable objects are serialized use
    * {@link #writeObjectArray(String, Object[], boolean)}.
    * 
    * @param fieldName the name of the field to write
@@ -403,13 +402,13 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>Object[]</code>.
+   * {@code Object[]}.
    * <p>
-   * Java Object[] is mapped to .NET System.Collections.Generic.List<Object>. For how each element
-   * of the array is a mapped to .NET see {@link #writeObject(String, Object, boolean) writeObject}.
-   * Note that this call may serialize elements that are not compatible with non-java languages. To
-   * ensure that only portable objects are serialized use
-   * {@link #writeObjectArray(String, Object[], boolean)}.
+   * Java Object[] is mapped to .NET System.Collections.Generic.List&lt;Object&gt;. For how each
+   * element of the array is a mapped to .NET see {@link #writeObject(String, Object, boolean)
+   * writeObject}. Note that this call may serialize elements that are not compatible with non-java
+   * languages. To ensure that only portable objects are serialized use
+   * {@code writeObjectArray(String, Object[], boolean)}.
    * 
    * @param fieldName the name of the field to write
    * @param value the value of the field to write
@@ -427,7 +426,7 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value to the serialized form. The fields type is
-   * <code>byte[][]</code>.
+   * {@code byte[][]}.
    * <p>
    * Java byte[][] is mapped to .NET System.Byte[][].
    * 
@@ -441,8 +440,8 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value and type to the serialized form. This method uses
-   * the <code>fieldType</code> to determine which writeXXX method it should call. If it can not
-   * find a specific match to a writeXXX method it will call {@link #writeObject(String, Object)
+   * the {@code fieldType} to determine which writeXXX method it should call. If it can not find a
+   * specific match to a writeXXX method it will call {@link #writeObject(String, Object)
    * writeObject}. This method may serialize objects that are not portable to non-java languages. To
    * ensure that only objects that are portable to non-java languages are serialized use
    * {@link #writeField(String, Object, Class, boolean)} instead.
@@ -476,7 +475,7 @@ public interface PdxInstanceFactory {
    * 
    * @param fieldName the name of the field to write
    * @param fieldValue the value of the field to write; this parameter's class must extend the
-   *        <code>fieldType</code>
+   *        {@code fieldType}
    * @param fieldType the type of the field to write
    * @return this PdxInstanceFactory
    * @throws PdxFieldAlreadyExistsException if the named field has already been written
@@ -487,11 +486,10 @@ public interface PdxInstanceFactory {
 
   /**
    * Writes the named field with the given value and type to the serialized form. This method uses
-   * the <code>fieldType</code> to determine which writeXXX method it should call. If it can not
-   * find a specific match to a writeXXX method it will call
-   * {@link #writeObject(String, Object, boolean) writeObject}. To ensure that only objects that are
-   * portable to non-java languages are serialized set the <code>checkPortability</code> parameter
-   * to true.
+   * the {@code fieldType} to determine which writeXXX method it should call. If it can not find a
+   * specific match to a writeXXX method it will call {@link #writeObject(String, Object, boolean)
+   * writeObject}. To ensure that only objects that are portable to non-java languages are
+   * serialized set the {@code checkPortability} parameter to true.
    * <p>
    * The fieldTypes that map to a specific method are:
    * <ul>
@@ -522,7 +520,7 @@ public interface PdxInstanceFactory {
    * 
    * @param fieldName the name of the field to write
    * @param fieldValue the value of the field to write; this parameter's class must extend the
-   *        <code>fieldType</code>
+   *        {@code fieldType}
    * @param fieldType the type of the field to write
    * @param checkPortability if true then an exception is thrown if a non-portable object is
    *        serialized

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/pdx/internal/ClientTypeRegistration.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/ClientTypeRegistration.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/ClientTypeRegistration.java
index 8c48473..c10de03 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/ClientTypeRegistration.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/ClientTypeRegistration.java
@@ -36,7 +36,7 @@ import org.apache.geode.cache.client.internal.GetPDXTypeByIdOp;
 import org.apache.geode.cache.client.internal.GetPDXTypesOp;
 import org.apache.geode.cache.client.internal.PoolImpl;
 import org.apache.geode.cache.wan.GatewaySender;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.PoolManagerImpl;
 import org.apache.geode.internal.logging.LogService;
 
@@ -44,9 +44,9 @@ public class ClientTypeRegistration implements TypeRegistration {
 
   private static final Logger logger = LogService.getLogger();
 
-  private final GemFireCacheImpl cache;
+  private final InternalCache cache;
 
-  public ClientTypeRegistration(GemFireCacheImpl cache) {
+  public ClientTypeRegistration(InternalCache cache) {
     this.cache = cache;
   }
 

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/pdx/internal/LonerTypeRegistration.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/LonerTypeRegistration.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/LonerTypeRegistration.java
index 5b82611..0609dd4 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/LonerTypeRegistration.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/LonerTypeRegistration.java
@@ -17,22 +17,19 @@ package org.apache.geode.pdx.internal;
 import java.util.Map;
 
 import org.apache.geode.cache.wan.GatewaySender;
-import org.apache.geode.internal.cache.CacheConfig;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 
 /**
  * A type registration that is used for loners. In the loner case, we'll try to be helpful and not
  * decide what type registration to give the user until they actually use it.
- *
  */
 public class LonerTypeRegistration implements TypeRegistration {
 
   private volatile TypeRegistration delegate = null;
 
-  private final GemFireCacheImpl cache;
-
+  private final InternalCache cache;
 
-  public LonerTypeRegistration(GemFireCacheImpl cache) {
+  public LonerTypeRegistration(InternalCache cache) {
     this.cache = cache;
   }
 
@@ -104,10 +101,9 @@ public class LonerTypeRegistration implements TypeRegistration {
    * Check to see if the current member is a loner and we can't tell if the user wants a peer or a
    * client type registry.
    * 
-   * @param cache
    * @return true if this member is a loner and we can't determine what type of registry they want.
    */
-  public static boolean isIndeterminateLoner(GemFireCacheImpl cache) {
+  public static boolean isIndeterminateLoner(InternalCache cache) {
     boolean isLoner = cache.getInternalDistributedSystem().isLoner();
     boolean pdxConfigured = cache.getPdxPersistent();
     return isLoner && !pdxConfigured/* && !hasGateways */;

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
index 6d2e906..08e3364 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PdxInstanceFactoryImpl.java
@@ -14,15 +14,13 @@
  */
 package org.apache.geode.pdx.internal;
 
-import java.nio.ByteBuffer;
 import java.util.Date;
 
 import org.apache.geode.internal.InternalDataSerializer;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
-import org.apache.geode.internal.tcp.ByteBufferInputStream.ByteSourceFactory;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.pdx.PdxInstance;
 import org.apache.geode.pdx.PdxInstanceFactory;
-import org.apache.geode.pdx.PdxUnreadFields;
 
 /**
  * PdxInstances created with this factory can never be deserialized but you can access their fields
@@ -32,26 +30,27 @@ import org.apache.geode.pdx.PdxUnreadFields;
  * PdxType is expensive since it can never figure out it is already defined without doing an
  * expensive check in the type registry. We should optimize this before making this a public
  * feature.
- *
  */
 public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
 
   private final PdxWriterImpl writer;
+
   private boolean created = false;
 
   private PdxInstanceFactoryImpl(String name, boolean expectDomainClass) {
-    PdxOutputStream os = new PdxOutputStream();
-    PdxType pt = new PdxType(name, expectDomainClass);
-    GemFireCacheImpl gfc = GemFireCacheImpl
+    PdxOutputStream pdxOutputStream = new PdxOutputStream();
+    PdxType pdxType = new PdxType(name, expectDomainClass);
+    InternalCache internalCache = GemFireCacheImpl
         .getForPdx("PDX registry is unavailable because the Cache has been closed.");
-    TypeRegistry tr = gfc.getPdxRegistry();
-    this.writer = new PdxWriterImpl(pt, tr, os);
+    TypeRegistry pdxRegistry = internalCache.getPdxRegistry();
+    this.writer = new PdxWriterImpl(pdxType, pdxRegistry, pdxOutputStream);
   }
 
   public static PdxInstanceFactory newCreator(String name, boolean expectDomainClass) {
     return new PdxInstanceFactoryImpl(name, expectDomainClass);
   }
 
+  @Override
   public PdxInstance create() {
     if (this.created) {
       throw new IllegalStateException("The create method can only be called once.");
@@ -61,135 +60,149 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
     return this.writer.makePdxInstance();
   }
 
+  @Override
   public PdxInstanceFactory writeChar(String fieldName, char value) {
     this.writer.writeChar(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeBoolean(String fieldName, boolean value) {
     this.writer.writeBoolean(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeByte(String fieldName, byte value) {
     this.writer.writeByte(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeShort(String fieldName, short value) {
     this.writer.writeShort(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeInt(String fieldName, int value) {
     this.writer.writeInt(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeLong(String fieldName, long value) {
     this.writer.writeLong(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeFloat(String fieldName, float value) {
     this.writer.writeFloat(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeDouble(String fieldName, double value) {
     this.writer.writeDouble(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeDate(String fieldName, Date date) {
-    this.writer.writeDate(fieldName, date);
+  @Override
+  public PdxInstanceFactory writeDate(String fieldName, Date value) {
+    this.writer.writeDate(fieldName, value);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeString(String fieldName, String value) {
     this.writer.writeString(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeObject(String fieldName, Object object) {
-    return writeObject(fieldName, object, false);
+  @Override
+  public PdxInstanceFactory writeObject(String fieldName, Object value) {
+    return writeObject(fieldName, value, false);
   }
 
-  public PdxInstanceFactory writeBooleanArray(String fieldName, boolean[] array) {
-    this.writer.writeBooleanArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeBooleanArray(String fieldName, boolean[] value) {
+    this.writer.writeBooleanArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeCharArray(String fieldName, char[] array) {
-    this.writer.writeCharArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeCharArray(String fieldName, char[] value) {
+    this.writer.writeCharArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeByteArray(String fieldName, byte[] array) {
-    this.writer.writeByteArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeByteArray(String fieldName, byte[] value) {
+    this.writer.writeByteArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeShortArray(String fieldName, short[] array) {
-    this.writer.writeShortArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeShortArray(String fieldName, short[] value) {
+    this.writer.writeShortArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeIntArray(String fieldName, int[] array) {
-    this.writer.writeIntArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeIntArray(String fieldName, int[] value) {
+    this.writer.writeIntArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeLongArray(String fieldName, long[] array) {
-    this.writer.writeLongArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeLongArray(String fieldName, long[] value) {
+    this.writer.writeLongArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeFloatArray(String fieldName, float[] array) {
-    this.writer.writeFloatArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeFloatArray(String fieldName, float[] value) {
+    this.writer.writeFloatArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeDoubleArray(String fieldName, double[] array) {
-    this.writer.writeDoubleArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeDoubleArray(String fieldName, double[] value) {
+    this.writer.writeDoubleArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeStringArray(String fieldName, String[] array) {
-    this.writer.writeStringArray(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeStringArray(String fieldName, String[] value) {
+    this.writer.writeStringArray(fieldName, value);
     return this;
   }
 
-  public PdxInstanceFactory writeObjectArray(String fieldName, Object[] array) {
-    return writeObjectArray(fieldName, array, false);
-  }
-
-  public PdxInstanceFactory writeUnreadFields(PdxUnreadFields unread) {
-    this.writer.writeUnreadFields(unread);
-    return this;
+  @Override
+  public PdxInstanceFactory writeObjectArray(String fieldName, Object[] value) {
+    return writeObjectArray(fieldName, value, false);
   }
 
-  public PdxInstanceFactory writeRaw(PdxField field, ByteBuffer rawData) {
-    this.writer.writeRawField(field, ByteSourceFactory.create(rawData));
-    return this;
-  }
-
-
-  public PdxInstanceFactory writeArrayOfByteArrays(String fieldName, byte[][] array) {
-    this.writer.writeArrayOfByteArrays(fieldName, array);
+  @Override
+  public PdxInstanceFactory writeArrayOfByteArrays(String fieldName, byte[][] value) {
+    this.writer.writeArrayOfByteArrays(fieldName, value);
     return this;
   }
 
+  @Override
   public <CT, VT extends CT> PdxInstanceFactory writeField(String fieldName, VT fieldValue,
       Class<CT> fieldType) {
     return writeField(fieldName, fieldValue, fieldType, false);
   }
 
+  @Override
   public PdxInstanceFactory markIdentityField(String fieldName) {
     this.writer.markIdentityField(fieldName);
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeObject(String fieldName, Object value, boolean checkPortability) {
     if (InternalDataSerializer.is662SerializationEnabled()) {
       boolean alreadyInProgress = InternalDataSerializer.isPdxSerializationInProgress();
@@ -210,6 +223,7 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
     return this;
   }
 
+  @Override
   public PdxInstanceFactory writeObjectArray(String fieldName, Object[] value,
       boolean checkPortability) {
     if (InternalDataSerializer.is662SerializationEnabled()) {
@@ -230,6 +244,7 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
     return this;
   }
 
+  @Override
   public <CT, VT extends CT> PdxInstanceFactory writeField(String fieldName, VT fieldValue,
       Class<CT> fieldType, boolean checkPortability) {
     if (InternalDataSerializer.is662SerializationEnabled()) {
@@ -251,14 +266,14 @@ public class PdxInstanceFactoryImpl implements PdxInstanceFactory {
   }
 
   public static PdxInstance createPdxEnum(String className, String enumName, int enumOrdinal,
-      GemFireCacheImpl gfc) {
+      InternalCache internalCache) {
     if (className == null) {
       throw new IllegalArgumentException("className must not be null");
     }
     if (enumName == null) {
       throw new IllegalArgumentException("enumName must not be null");
     }
-    TypeRegistry tr = gfc.getPdxRegistry();
+    TypeRegistry tr = internalCache.getPdxRegistry();
     EnumInfo ei = new EnumInfo(className, enumName, enumOrdinal);
     return ei.getPdxInstance(tr.defineEnum(ei));
   }

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
index b4fa33e..6b22aac 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/PeerTypeRegistration.java
@@ -23,14 +23,16 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.logging.log4j.Logger;
+
 import org.apache.geode.InternalGemFireError;
 import org.apache.geode.InternalGemFireException;
 import org.apache.geode.cache.AttributesFactory;
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheWriterException;
 import org.apache.geode.cache.DataPolicy;
+import org.apache.geode.cache.DiskStore;
 import org.apache.geode.cache.EntryEvent;
-import org.apache.geode.cache.Operation;
 import org.apache.geode.cache.Region;
 import org.apache.geode.cache.RegionAttributes;
 import org.apache.geode.cache.RegionExistsException;
@@ -50,12 +52,9 @@ import org.apache.geode.distributed.internal.InternalDistributedSystem;
 import org.apache.geode.distributed.internal.locks.DLockService;
 import org.apache.geode.internal.CopyOnWriteHashSet;
 import org.apache.geode.internal.cache.DiskStoreImpl;
-import org.apache.geode.internal.cache.EntryEventImpl;
-import org.apache.geode.internal.cache.EnumListenerEvent;
-import org.apache.geode.internal.cache.EventID;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.cache.InternalRegionArguments;
-import org.apache.geode.internal.cache.LocalRegion;
 import org.apache.geode.internal.cache.TXManagerImpl;
 import org.apache.geode.internal.cache.TXStateProxy;
 import org.apache.geode.internal.logging.LogService;
@@ -63,23 +62,19 @@ import org.apache.geode.internal.util.concurrent.CopyOnWriteHashMap;
 import org.apache.geode.pdx.JSONFormatter;
 import org.apache.geode.pdx.PdxInitializationException;
 import org.apache.geode.pdx.PdxRegistryMismatchException;
-import org.apache.logging.log4j.Logger;
 
-/**
- *
- */
 public class PeerTypeRegistration implements TypeRegistration {
   private static final Logger logger = LogService.getLogger();
 
-  /**
-   * 
-   */
   private static final int MAX_TRANSACTION_FAILURES = 10;
+
   public static final String LOCK_SERVICE_NAME = "__PDX";
+
   /**
    * The region name. Public for tests only.
    */
   public static final String REGION_NAME = "PdxTypes";
+
   public static final String REGION_FULL_PATH = "/" + REGION_NAME;
   public static final int PLACE_HOLDER_FOR_TYPE_ID = 0xFFFFFF;
   public static final int PLACE_HOLDER_FOR_DS_ID = 0xFF000000;
@@ -88,7 +83,7 @@ public class PeerTypeRegistration implements TypeRegistration {
   private final int maxTypeId;
   private volatile DistributedLockService dls;
   private final Object dlsLock = new Object();
-  private GemFireCacheImpl cache;
+  private InternalCache cache;
 
   /**
    * The region where the PDX metadata is stored. Because this region is transactional for our
@@ -111,10 +106,9 @@ public class PeerTypeRegistration implements TypeRegistration {
 
   private volatile boolean typeRegistryInUse = false;
 
-  public PeerTypeRegistration(GemFireCacheImpl cache) {
+  public PeerTypeRegistration(InternalCache cache) {
     this.cache = cache;
 
-
     int distributedSystemId =
         cache.getInternalDistributedSystem().getDistributionManager().getDistributedSystemId();
     if (distributedSystemId == -1) {
@@ -378,7 +372,7 @@ public class PeerTypeRegistration implements TypeRegistration {
     verifyConfiguration();
     Integer existingId = typeToId.get(newType);
     if (existingId != null) {
-      return existingId.intValue();
+      return existingId;
     }
     lock();
     try {
@@ -392,8 +386,7 @@ public class PeerTypeRegistration implements TypeRegistration {
 
       updateIdToTypeRegion(newType);
 
-      typeToId.put(newType, Integer.valueOf(id));
-      // this.cache.getLogger().info("Defining: " + newType, new RuntimeException("STACK"));
+      typeToId.put(newType, id);
 
       return newType.getTypeId();
     } finally {
@@ -537,10 +530,10 @@ public class PeerTypeRegistration implements TypeRegistration {
   }
 
   public boolean hasPersistentRegions() {
-    Collection<DiskStoreImpl> diskStores = cache.listDiskStoresIncludingRegionOwned();
+    Collection<DiskStore> diskStores = cache.listDiskStoresIncludingRegionOwned();
     boolean hasPersistentRegions = false;
-    for (DiskStoreImpl store : diskStores) {
-      hasPersistentRegions |= store.hasPersistedData();
+    for (DiskStore store : diskStores) {
+      hasPersistentRegions |= ((DiskStoreImpl) store).hasPersistedData();
     }
     return hasPersistentRegions;
   }
@@ -574,7 +567,7 @@ public class PeerTypeRegistration implements TypeRegistration {
         } else {
           PdxType foundType = (PdxType) v;
           Integer id = (Integer) k;
-          int tmpDsId = PLACE_HOLDER_FOR_DS_ID & id.intValue();
+          int tmpDsId = PLACE_HOLDER_FOR_DS_ID & id;
           if (tmpDsId == this.dsId) {
             totalPdxTypeIdInDS++;
           }
@@ -635,8 +628,7 @@ public class PeerTypeRegistration implements TypeRegistration {
   private TXStateProxy suspendTX() {
     Cache c = (Cache) getIdToType().getRegionService();
     TXManagerImpl txManager = (TXManagerImpl) c.getCacheTransactionManager();
-    TXStateProxy currentState = txManager.internalSuspend();
-    return currentState;
+    return txManager.internalSuspend();
   }
 
   private void resumeTX(TXStateProxy state) {
@@ -759,8 +751,6 @@ public class PeerTypeRegistration implements TypeRegistration {
 
   /**
    * adds a PdxType for a field to a {@code className => Set<PdxType>} map
-   * 
-   * @param type
    */
   private void updateClassToTypeMap(PdxType type) {
     if (type != null) {

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
----------------------------------------------------------------------
diff --git a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
index ebca878..e245b34 100644
--- a/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
+++ b/geode-core/src/main/java/org/apache/geode/pdx/internal/TypeRegistry.java
@@ -14,6 +14,13 @@
  */
 package org.apache.geode.pdx.internal;
 
+import static java.lang.Integer.*;
+
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+import org.apache.logging.log4j.Logger;
+
 import org.apache.geode.cache.CacheClosedException;
 import org.apache.geode.cache.DiskStore;
 import org.apache.geode.cache.DiskStoreFactory;
@@ -21,7 +28,7 @@ import org.apache.geode.cache.wan.GatewaySender;
 import org.apache.geode.distributed.internal.DistributionConfig;
 import org.apache.geode.internal.Assert;
 import org.apache.geode.internal.InternalDataSerializer;
-import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.internal.i18n.LocalizedStrings;
 import org.apache.geode.internal.logging.LogService;
 import org.apache.geode.internal.util.concurrent.CopyOnWriteHashMap;
@@ -29,11 +36,6 @@ import org.apache.geode.internal.util.concurrent.CopyOnWriteWeakHashMap;
 import org.apache.geode.pdx.PdxSerializationException;
 import org.apache.geode.pdx.PdxSerializer;
 import org.apache.geode.pdx.ReflectionBasedAutoSerializer;
-import org.apache.logging.log4j.Logger;
-
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicReference;
-
 
 public class TypeRegistry {
   private static final Logger logger = LogService.getLogger();
@@ -41,31 +43,39 @@ public class TypeRegistry {
   private static final boolean DISABLE_TYPE_REGISTRY =
       Boolean.getBoolean(DistributionConfig.GEMFIRE_PREFIX + "TypeRegistry.DISABLE_PDX_REGISTRY");
 
-  private final Map<Integer, PdxType> idToType = new CopyOnWriteHashMap<Integer, PdxType>();
-  private final Map<PdxType, Integer> typeToId = new CopyOnWriteHashMap<PdxType, Integer>();
-  private final Map<Class<?>, PdxType> localTypeIds =
-      new CopyOnWriteWeakHashMap<Class<?>, PdxType>();
+  private final Map<Integer, PdxType> idToType = new CopyOnWriteHashMap<>();
+
+  private final Map<PdxType, Integer> typeToId = new CopyOnWriteHashMap<>();
+
+  private final Map<Class<?>, PdxType> localTypeIds = new CopyOnWriteWeakHashMap<>();
+
   private final Map<Class<?>, Map<Integer, UnreadPdxType>> localTypeIdMaps =
-      new CopyOnWriteWeakHashMap<Class<?>, Map<Integer, UnreadPdxType>>();
+      new CopyOnWriteWeakHashMap<>();
+
   private final WeakConcurrentIdentityHashMap<Object, PdxUnreadData> unreadDataMap =
       WeakConcurrentIdentityHashMap.make();
-  private final Map<Integer, EnumInfo> idToEnum = new CopyOnWriteHashMap<Integer, EnumInfo>();
-  private final Map<EnumInfo, Integer> enumInfoToId = new CopyOnWriteHashMap<EnumInfo, Integer>();
-  private final Map<Enum<?>, Integer> localEnumIds = new CopyOnWriteWeakHashMap<Enum<?>, Integer>();
+
+  private final Map<Integer, EnumInfo> idToEnum = new CopyOnWriteHashMap<>();
+
+  private final Map<EnumInfo, Integer> enumInfoToId = new CopyOnWriteHashMap<>();
+
+  private final Map<Enum<?>, Integer> localEnumIds = new CopyOnWriteWeakHashMap<>();
+
   private final TypeRegistration distributedTypeRegistry;
-  private final GemFireCacheImpl cache;
 
-  public TypeRegistry(GemFireCacheImpl cache, boolean disableTypeRegistry) {
+  private final InternalCache cache;
+
+  public TypeRegistry(InternalCache cache, boolean disableTypeRegistry) {
     this.cache = cache;
 
     if (DISABLE_TYPE_REGISTRY || disableTypeRegistry) {
-      distributedTypeRegistry = new NullTypeRegistration();
+      this.distributedTypeRegistry = new NullTypeRegistration();
     } else if (cache.hasPool()) {
-      distributedTypeRegistry = new ClientTypeRegistration(cache);
+      this.distributedTypeRegistry = new ClientTypeRegistration(cache);
     } else if (LonerTypeRegistration.isIndeterminateLoner(cache)) {
-      distributedTypeRegistry = new LonerTypeRegistration(cache);
+      this.distributedTypeRegistry = new LonerTypeRegistration(cache);
     } else {
-      distributedTypeRegistry = new PeerTypeRegistration(cache);
+      this.distributedTypeRegistry = new PeerTypeRegistration(cache);
     }
   }
 
@@ -77,7 +87,7 @@ public class TypeRegistry {
     this.idToType.clear();
     this.idToEnum.clear();
     this.enumInfoToId.clear();
-    distributedTypeRegistry.testClearRegistry();
+    this.distributedTypeRegistry.testClearRegistry();
   }
 
   public void testClearLocalTypeRegistry() {
@@ -86,17 +96,11 @@ public class TypeRegistry {
     this.localEnumIds.clear();
   }
 
-  public static boolean mayNeedDiskStore(GemFireCacheImpl cache) {
-    if (DISABLE_TYPE_REGISTRY) {
-      return false;
-    } else if (cache.hasPool()) {
-      return false;
-    } else {
-      return cache.getPdxPersistent();
-    }
+  public static boolean mayNeedDiskStore(InternalCache cache) {
+    return !DISABLE_TYPE_REGISTRY && !cache.hasPool() && cache.getPdxPersistent();
   }
 
-  public static String getPdxDiskStoreName(GemFireCacheImpl cache) {
+  public static String getPdxDiskStoreName(InternalCache cache) {
     if (!mayNeedDiskStore(cache)) {
       return null;
     } else {
@@ -109,9 +113,9 @@ public class TypeRegistry {
   }
 
   public void initialize() {
-    if (!cache.getPdxPersistent() || cache.getPdxDiskStore() == null
-        || cache.findDiskStore(cache.getPdxDiskStore()) != null) {
-      distributedTypeRegistry.initialize();
+    if (!this.cache.getPdxPersistent() || this.cache.getPdxDiskStore() == null
+        || this.cache.findDiskStore(this.cache.getPdxDiskStore()) != null) {
+      this.distributedTypeRegistry.initialize();
     }
   }
 
@@ -146,40 +150,39 @@ public class TypeRegistry {
     return null;
   }
 
-
-  public PdxType getExistingType(Object o) {
+  PdxType getExistingType(Object o) {
     return getExistingTypeForClass(o.getClass());
   }
 
-  public PdxType getExistingTypeForClass(Class<?> c) {
-    return this.localTypeIds.get(c);
+  public PdxType getExistingTypeForClass(Class<?> aClass) {
+    return this.localTypeIds.get(aClass);
   }
 
   /**
    * Returns the local type that should be used for deserializing blobs of the given typeId for the
    * given local class. Returns null if no such local type exists.
    */
-  public UnreadPdxType getExistingTypeForClass(Class<?> c, int typeId) {
-    Map<Integer, UnreadPdxType> m = this.localTypeIdMaps.get(c);
-    if (m != null) {
-      return m.get(typeId);
+  UnreadPdxType getExistingTypeForClass(Class<?> aClass, int typeId) {
+    Map<Integer, UnreadPdxType> map = this.localTypeIdMaps.get(aClass);
+    if (map != null) {
+      return map.get(typeId);
     } else {
       return null;
     }
   }
 
-  public void defineUnreadType(Class<?> c, UnreadPdxType unreadPdxType) {
+  void defineUnreadType(Class<?> aClass, UnreadPdxType unreadPdxType) {
     int typeId = unreadPdxType.getTypeId();
     // even though localTypeIdMaps is copy on write we need to sync it
     // during write to safely update the nested map.
     // We make the nested map copy-on-write so that readers don't need to sync.
     synchronized (this.localTypeIdMaps) {
-      Map<Integer, UnreadPdxType> m = this.localTypeIdMaps.get(c);
-      if (m == null) {
-        m = new CopyOnWriteHashMap<Integer, UnreadPdxType>();
-        this.localTypeIdMaps.put(c, m);
+      Map<Integer, UnreadPdxType> map = this.localTypeIdMaps.get(aClass);
+      if (map == null) {
+        map = new CopyOnWriteHashMap<Integer, UnreadPdxType>();
+        this.localTypeIdMaps.put(aClass, map);
       }
-      m.put(typeId, unreadPdxType);
+      map.put(typeId, unreadPdxType);
     }
   }
 
@@ -189,11 +192,12 @@ public class TypeRegistry {
   public int defineType(PdxType newType) {
     Integer existingId = this.typeToId.get(newType);
     if (existingId != null) {
-      int eid = existingId.intValue();
+      int eid = existingId;
       newType.setTypeId(eid);
       return eid;
     }
-    int id = distributedTypeRegistry.defineType(newType);
+
+    int id = this.distributedTypeRegistry.defineType(newType);
     newType.setTypeId(id);
     PdxType oldType = this.idToType.get(id);
     if (oldType == null) {
@@ -228,7 +232,7 @@ public class TypeRegistry {
   /**
    * Create a type id for a type that was generated locally.
    */
-  public PdxType defineLocalType(Object o, PdxType newType) {
+  PdxType defineLocalType(Object o, PdxType newType) {
     if (o != null) {
       PdxType t = getExistingType(o);
       if (t != null) {
@@ -244,7 +248,6 @@ public class TypeRegistry {
     return newType;
   }
 
-
   /**
    * Test hook that returns the most recently allocated type id
    * 
@@ -253,31 +256,32 @@ public class TypeRegistry {
    * @return the most recently allocated type id
    */
   public int getLastAllocatedTypeId() {
-    return distributedTypeRegistry.getLastAllocatedTypeId();
+    return this.distributedTypeRegistry.getLastAllocatedTypeId();
   }
 
   public TypeRegistration getTypeRegistration() {
-    return distributedTypeRegistry;
+    return this.distributedTypeRegistry;
   }
 
   public void gatewaySenderStarted(GatewaySender gatewaySender) {
-    if (distributedTypeRegistry != null) {
-      distributedTypeRegistry.gatewaySenderStarted(gatewaySender);
+    if (this.distributedTypeRegistry != null) {
+      this.distributedTypeRegistry.gatewaySenderStarted(gatewaySender);
     }
   }
 
   public void creatingDiskStore(DiskStore dsi) {
-    if (cache.getPdxDiskStore() != null && dsi.getName().equals(cache.getPdxDiskStore())) {
-      distributedTypeRegistry.initialize();
+    if (this.cache.getPdxDiskStore() != null
+        && dsi.getName().equals(this.cache.getPdxDiskStore())) {
+      this.distributedTypeRegistry.initialize();
     }
   }
 
   public void creatingPersistentRegion() {
-    distributedTypeRegistry.creatingPersistentRegion();
+    this.distributedTypeRegistry.creatingPersistentRegion();
   }
 
   public void creatingPool() {
-    distributedTypeRegistry.creatingPool();
+    this.distributedTypeRegistry.creatingPool();
   }
 
   // test hook
@@ -285,23 +289,24 @@ public class TypeRegistry {
     this.localTypeIds.remove(o.getClass());
   }
 
-  public PdxUnreadData getUnreadData(Object o) {
+  PdxUnreadData getUnreadData(Object o) {
     return this.unreadDataMap.get(o);
   }
 
-  public void putUnreadData(Object o, PdxUnreadData ud) {
+  void putUnreadData(Object o, PdxUnreadData ud) {
     this.unreadDataMap.put(o, ud);
   }
 
-  private static final AtomicReference<PdxSerializer> pdxSerializer =
-      new AtomicReference<PdxSerializer>(null);
-  private static final AtomicReference<AutoSerializableManager> asm =
-      new AtomicReference<AutoSerializableManager>(null);
+  private static final AtomicReference<PdxSerializer> pdxSerializer = new AtomicReference<>(null);
+
+  private static final AtomicReference<AutoSerializableManager> asm = new AtomicReference<>(null);
+
   /**
    * To fix bug 45116 we want any attempt to get the PdxSerializer after it has been closed to fail
    * with an exception.
    */
   private static volatile boolean open = false;
+
   /**
    * If the pdxSerializer is ever set to a non-null value then set this to true. It gets reset to
    * false when init() is called. This was added to fix bug 45116.
@@ -357,10 +362,10 @@ public class TypeRegistry {
     if (v != null) {
       Integer id = this.localEnumIds.get(v);
       if (id != null) {
-        result = id.intValue();
+        result = id;
       } else {
-        result = distributedTypeRegistry.getEnumId(v);
-        id = Integer.valueOf(result);
+        result = this.distributedTypeRegistry.getEnumId(v);
+        id = valueOf(result);
         this.localEnumIds.put(v, id);
         EnumInfo ei = new EnumInfo(v);
         this.idToEnum.put(id, ei);
@@ -385,9 +390,9 @@ public class TypeRegistry {
   public int defineEnum(EnumInfo newInfo) {
     Integer existingId = this.enumInfoToId.get(newInfo);
     if (existingId != null) {
-      return existingId.intValue();
+      return existingId;
     }
-    int id = distributedTypeRegistry.defineEnum(newInfo);
+    int id = this.distributedTypeRegistry.defineEnum(newInfo);
     EnumInfo oldInfo = this.idToEnum.get(id);
     if (oldInfo == null) {
       this.idToEnum.put(id, newInfo);
@@ -444,21 +449,20 @@ public class TypeRegistry {
    * server side distributed system is cycled
    */
   public void clear() {
-    if (distributedTypeRegistry.isClient()) {
-      idToType.clear();
-      typeToId.clear();
-      localTypeIds.clear();
-      localTypeIdMaps.clear();
-      unreadDataMap.clear();
-      idToEnum.clear();
-      enumInfoToId.clear();
-      localEnumIds.clear();
+    if (this.distributedTypeRegistry.isClient()) {
+      this.idToType.clear();
+      this.typeToId.clear();
+      this.localTypeIds.clear();
+      this.localTypeIdMaps.clear();
+      this.unreadDataMap.clear();
+      this.idToEnum.clear();
+      this.enumInfoToId.clear();
+      this.localEnumIds.clear();
       AutoSerializableManager autoSerializer = getAutoSerializableManager();
       if (autoSerializer != null) {
         autoSerializer.resetCachedTypes();
       }
     }
-
   }
 
   /**
@@ -467,7 +471,7 @@ public class TypeRegistry {
    * @return the types
    */
   public Map<Integer, PdxType> typeMap() {
-    return distributedTypeRegistry.types();
+    return this.distributedTypeRegistry.types();
   }
 
   /**
@@ -476,7 +480,7 @@ public class TypeRegistry {
    * @return the enums
    */
   public Map<Integer, EnumInfo> enumMap() {
-    return distributedTypeRegistry.enums();
+    return this.distributedTypeRegistry.enums();
   }
 
   /**
@@ -487,8 +491,8 @@ public class TypeRegistry {
    * @return PdxType having the field or null if not found
    * 
    */
-  public PdxType getPdxTypeForField(String fieldName, String className) {
-    return distributedTypeRegistry.getPdxTypeForField(fieldName, className);
+  PdxType getPdxTypeForField(String fieldName, String className) {
+    return this.distributedTypeRegistry.getPdxTypeForField(fieldName, className);
   }
 
   public void addImportedType(int typeId, PdxType importedType) {
@@ -522,10 +526,10 @@ public class TypeRegistry {
   * Get the size of the type registry in this local member
    */
   public int getLocalSize() {
-    int result = distributedTypeRegistry.getLocalSize();
+    int result = this.distributedTypeRegistry.getLocalSize();
     if (result == 0) {
       // If this is the client, go ahead and return the number of cached types we have
-      return idToType.size();
+      return this.idToType.size();
     }
     return result;
   }
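
The javadoc on the `open` flag above describes how reads of the pdxSerializer are meant to fail once the cache has been closed (bug 45116). A minimal sketch of that guarded accessor, assuming a `pdxSerializerWasSet` flag and a CacheClosedException message (neither is shown in the hunks above, so treat this as illustration only):

    // Rough sketch only -- the pdxSerializerWasSet field name and the exact
    // exception message are assumptions, not taken from this commit.
    public static PdxSerializer getPdxSerializer() {
      PdxSerializer result = pdxSerializer.get();
      if (result == null && !open && pdxSerializerWasSet) {
        throw new CacheClosedException("Could not PDX serialize because the cache was closed");
      }
      return result;
    }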

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/test/java/org/apache/geode/cache/query/CacheUtils.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/CacheUtils.java b/geode-core/src/test/java/org/apache/geode/cache/query/CacheUtils.java
index da79ae0..e5fa846 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/CacheUtils.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/CacheUtils.java
@@ -12,49 +12,52 @@
  * or implied. See the License for the specific language governing permissions and limitations under
  * the License.
  */
-/*
- * Utils.java
- *
- * Created on March 8, 2005, 4:16 PM
- */
 package org.apache.geode.cache.query;
 
+import static org.apache.geode.distributed.ConfigurationProperties.*;
+import static org.junit.Assert.*;
+
+import java.util.Iterator;
+import java.util.Properties;
+import java.util.Set;
+
+import org.apache.logging.log4j.Logger;
+
 import org.apache.geode.LogWriter;
-import org.apache.geode.cache.*;
+import org.apache.geode.cache.AttributesFactory;
+import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.CacheTransactionManager;
+import org.apache.geode.cache.Region;
+import org.apache.geode.cache.RegionAttributes;
+import org.apache.geode.cache.Scope;
 import org.apache.geode.cache.query.types.CollectionType;
 import org.apache.geode.cache.query.types.ObjectType;
 import org.apache.geode.distributed.DistributedSystem;
 import org.apache.geode.internal.cache.GemFireCacheImpl;
+import org.apache.geode.internal.cache.InternalCache;
+import org.apache.geode.internal.logging.LogService;
 
-import java.util.Iterator;
-import java.util.Properties;
-import java.util.Set;
+import org.apache.geode.cache.TimeoutException;
+import org.apache.geode.cache.CacheWriterException;
+import org.apache.geode.cache.GatewayException;
+import org.apache.geode.cache.RegionExistsException;
 
-import static org.apache.geode.distributed.ConfigurationProperties.MCAST_PORT;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-/**
- * 
- */
 public class CacheUtils {
+  private static final Logger logger = LogService.getLogger();
 
-  static Properties props = new Properties();
-  static DistributedSystem ds;
-  static volatile Cache cache;
+  private static Properties props = new Properties();
+  private static DistributedSystem ds;
+  static volatile InternalCache cache;
   static QueryService qs;
   static {
-    try {
-      init();
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
+    init();
   }
 
-  static void init() throws Exception {
+  private static void init()
+      throws TimeoutException, CacheWriterException, GatewayException, RegionExistsException {
     if (GemFireCacheImpl.getInstance() == null) {
       props.setProperty(MCAST_PORT, "0");
-      cache = new CacheFactory(props).create();
+      cache = (InternalCache) new CacheFactory(props).create();
     } else {
       cache = GemFireCacheImpl.getInstance();
     }
@@ -62,19 +65,19 @@ public class CacheUtils {
     qs = cache.getQueryService();
   }
 
-  public static Cache getCache() {
+  public static InternalCache getCache() {
     return cache;
   }
 
   public static void startCache() {
     try {
       if (cache.isClosed()) {
-        cache = new CacheFactory(props).create();
+        cache = (InternalCache) new CacheFactory(props).create();
         ds = cache.getDistributedSystem();
         qs = cache.getQueryService();
       }
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
   }
 
@@ -84,7 +87,7 @@ public class CacheUtils {
         cache.close();
       }
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
   }
 
@@ -93,11 +96,11 @@ public class CacheUtils {
       if (!cache.isClosed()) {
         cache.close();
       }
-      cache = new CacheFactory(props).create();
+      cache = (InternalCache) new CacheFactory(props).create();
       ds = cache.getDistributedSystem();
       qs = cache.getQueryService();
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
   }
 
@@ -109,23 +112,20 @@ public class CacheUtils {
         attributesFactory.setScope(scope);
       }
       RegionAttributes regionAttributes = attributesFactory.create();
-      Region region = cache.createRegion(regionName, regionAttributes);
-      return region;
+      return cache.createRegion(regionName, regionAttributes);
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
-    return null;
   }
 
+  // TODO: parameter flag is unused
   public static Region createRegion(String regionName, RegionAttributes regionAttributes,
       boolean flag) {
     try {
-      Region region = cache.createRegion(regionName, regionAttributes);
-      return region;
+      return cache.createRegion(regionName, regionAttributes);
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
-    return null;
   }
 
   public static Region createRegion(String regionName, Class valueConstraint) {
@@ -139,12 +139,10 @@ public class CacheUtils {
       attributesFactory.setValueConstraint(valueConstraint);
       attributesFactory.setIndexMaintenanceSynchronous(indexMaintenanceSynchronous);
       RegionAttributes regionAttributes = attributesFactory.create();
-      Region region = cache.createRegion(regionName, regionAttributes);
-      return region;
+      return cache.createRegion(regionName, regionAttributes);
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
-    return null;
   }
 
   public static Region createRegion(Region parentRegion, String regionName, Class valueConstraint) {
@@ -153,12 +151,10 @@ public class CacheUtils {
       if (valueConstraint != null)
         attributesFactory.setValueConstraint(valueConstraint);
       RegionAttributes regionAttributes = attributesFactory.create();
-      Region region = parentRegion.createSubregion(regionName, regionAttributes);
-      return region;
+      return parentRegion.createSubregion(regionName, regionAttributes);
     } catch (Exception e) {
-      e.printStackTrace();
+      throw new AssertionError(e);
     }
-    return null;
   }
 
   public static Region getRegion(String regionPath) {
@@ -179,10 +175,7 @@ public class CacheUtils {
   }
 
   public static void log(Object message) {
-    Cache cache = GemFireCacheImpl.getInstance();
-    if (cache != null) {
-      cache.getLogger().fine(message.toString());
-    }
+    logger.debug(message);
   }
 
   public static CacheTransactionManager getCacheTranxnMgr() {
@@ -190,16 +183,19 @@ public class CacheUtils {
   }
 
   public static void compareResultsOfWithAndWithoutIndex(SelectResults[][] r, Object test) {
-    Set set1 = null;
-    Set set2 = null;
-    Iterator itert1 = null;
-    Iterator itert2 = null;
-    ObjectType type1, type2;
-    for (int j = 0; j < r.length; j++) {
-      CollectionType collType1 = r[j][0].getCollectionType();
-      CollectionType collType2 = r[j][1].getCollectionType();
+    Set set1;
+    Set set2;
+    Iterator itert1;
+    Iterator itert2;
+    ObjectType type1;
+    ObjectType type2;
+
+    for (final SelectResults[] selectResults : r) {
+      CollectionType collType1 = selectResults[0].getCollectionType();
+      CollectionType collType2 = selectResults[1].getCollectionType();
       type1 = collType1.getElementType();
       type2 = collType2.getElementType();
+
       if (collType1.getSimpleClassName().equals(collType2.getSimpleClassName())) {
         log("Both SelectResults are of the same Type i.e.--> " + collType1);
       } else {
@@ -208,6 +204,7 @@ public class CacheUtils {
             "FAILED:Select results Collection Type is different in both the cases. CollectionType1="
                 + collType1 + " CollectionType2=" + collType2);
       }
+
       if (type1.equals(type2)) {
         log("Both SelectResults have same element Type i.e.--> " + type1);
       } else {
@@ -224,15 +221,17 @@ public class CacheUtils {
         fail("FAILED:SelectResults Collection Type is different in both the cases. CollType1="
             + collType1 + " CollType2=" + collType2);
       }
-      if (r[j][0].size() == r[j][1].size()) {
-        log("Both SelectResults are of Same Size i.e.  Size= " + r[j][1].size());
+
+      if (selectResults[0].size() == selectResults[1].size()) {
+        log("Both SelectResults are of Same Size i.e.  Size= " + selectResults[1].size());
       } else {
-        fail("FAILED:SelectResults size is different in both the cases. Size1=" + r[j][0].size()
-            + " Size2 = " + r[j][1].size());
+        fail("FAILED:SelectResults size is different in both the cases. Size1="
+            + selectResults[0].size() + " Size2 = " + selectResults[1].size());
       }
-      set2 = ((r[j][1]).asSet());
-      set1 = ((r[j][0]).asSet());
-      // boolean pass = true;
+
+      set2 = selectResults[1].asSet();
+      set1 = selectResults[0].asSet();
+
       itert1 = set1.iterator();
       while (itert1.hasNext()) {
         Object p1 = itert1.next();
@@ -241,6 +240,7 @@ public class CacheUtils {
         boolean exactMatch = false;
         while (itert2.hasNext()) {
           Object p2 = itert2.next();
+
           if (p1 instanceof Struct) {
             Object[] values1 = ((Struct) p1).getFieldValues();
             Object[] values2 = ((Struct) p2).getFieldValues();
@@ -248,11 +248,11 @@ public class CacheUtils {
             boolean elementEqual = true;
             for (int i = 0; i < values1.length; ++i) {
               elementEqual =
-                  elementEqual && ((values1[i] == values2[i]) || values1[i].equals(values2[i]));
+                  elementEqual && (values1[i] == values2[i] || values1[i].equals(values2[i]));
             }
             exactMatch = elementEqual;
           } else {
-            exactMatch = (p2 == p1) || p2.equals(p1);
+            exactMatch = p2 == p1 || p2.equals(p1);
           }
           if (exactMatch) {
             break;
@@ -260,7 +260,7 @@ public class CacheUtils {
         }
         if (!exactMatch) {
           fail(
-              "Atleast one element in the pair of SelectResults supposedly identical, is not equal ");
+              "At least one element in the pair of SelectResults supposedly identical, is not equal");
         }
       }
     }
@@ -268,14 +268,17 @@ public class CacheUtils {
 
   public static boolean compareResultsOfWithAndWithoutIndex(SelectResults[][] r) {
     boolean ok = true;
-    Set set1 = null;
-    Set set2 = null;
-    Iterator itert1 = null;
-    Iterator itert2 = null;
-    ObjectType type1, type2;
-    outer: for (int j = 0; j < r.length; j++) {
-      CollectionType collType1 = r[j][0].getCollectionType();
-      CollectionType collType2 = r[j][1].getCollectionType();
+    Set set1;
+    Set set2;
+    Iterator itert1;
+    Iterator itert2;
+    ObjectType type1;
+    ObjectType type2;
+
+    // TODO: eliminate loop labels
+    outer: for (final SelectResults[] aR : r) {
+      CollectionType collType1 = aR[0].getCollectionType();
+      CollectionType collType2 = aR[1].getCollectionType();
       type1 = collType1.getElementType();
       type2 = collType2.getElementType();
 
@@ -288,6 +291,7 @@ public class CacheUtils {
         ok = false;
         break;
       }
+
       if (type1.equals(type2)) {
         log("Both SelectResults have same element Type i.e.--> " + type1);
       } else {
@@ -308,18 +312,20 @@ public class CacheUtils {
         ok = false;
         break;
       }
-      if (r[j][0].size() == r[j][1].size()) {
-        log("Both SelectResults are of Same Size i.e.  Size= " + r[j][1].size());
+
+      if (aR[0].size() == aR[1].size()) {
+        log("Both SelectResults are of Same Size i.e.  Size= " + aR[1].size());
       } else {
         // test.fail("FAILED:SelectResults size is different in both the cases. Size1=" +
         // r[j][0].size() + " Size2 = " + r[j][1].size());
         ok = false;
         break;
       }
-      set2 = (((SelectResults) r[j][1]).asSet());
-      set1 = (((SelectResults) r[j][0]).asSet());
-      boolean pass = true;
+
+      set2 = aR[1].asSet();
+      set1 = aR[0].asSet();
       itert1 = set1.iterator();
+
       while (itert1.hasNext()) {
         Object p1 = itert1.next();
         itert2 = set2.iterator();
@@ -330,7 +336,6 @@ public class CacheUtils {
           if (p1 instanceof Struct) {
             Object[] values1 = ((Struct) p1).getFieldValues();
             Object[] values2 = ((Struct) p2).getFieldValues();
-            // test.assertIndexDetailsEquals(values1.length, values2.length);
             if (values1.length != values2.length) {
               ok = false;
               break outer;
@@ -339,22 +344,20 @@ public class CacheUtils {
             for (int i = 0; i < values1.length; ++i) {
               if (values1[i] != null) {
                 elementEqual =
-                    elementEqual && ((values1[i] == values2[i]) || values1[i].equals(values2[i]));
+                    elementEqual && (values1[i] == values2[i] || values1[i].equals(values2[i]));
               } else {
-                elementEqual = elementEqual && ((values1[i] == values2[i]));
+                elementEqual = elementEqual && values1[i] == values2[i];
               }
             }
             exactMatch = elementEqual;
           } else {
-            exactMatch = (p2 == p1) || p2.equals(p1);
+            exactMatch = p2 == p1 || p2.equals(p1);
           }
           if (exactMatch) {
             break;
           }
         }
         if (!exactMatch) {
-          // test.fail("Atleast one element in the pair of SelectResults supposedly identical, is
-          // not equal ");
           ok = false;
           break outer;
         }
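
Taken together, the helpers above (createRegion, getCache, compareResultsOfWithAndWithoutIndex) are meant to be driven from query tests. A hypothetical usage sketch, assuming the existing Portfolio query test domain class and illustrative query/index names (none of this is part of the commit):

    @Test
    public void indexedAndNonIndexedResultsShouldMatch() throws Exception {
      Region region = CacheUtils.createRegion("portfolio", Portfolio.class);
      for (int i = 0; i < 10; i++) {
        region.put(String.valueOf(i), new Portfolio(i));
      }
      QueryService qs = CacheUtils.getCache().getQueryService();
      Query query = qs.newQuery("SELECT DISTINCT * FROM /portfolio WHERE ID > 0");

      SelectResults[][] results = new SelectResults[1][2];
      results[0][0] = (SelectResults) query.execute();   // without an index

      qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
      results[0][1] = (SelectResults) query.execute();   // with the index in place

      CacheUtils.compareResultsOfWithAndWithoutIndex(results, this);
    }

Because the helpers now throw AssertionError instead of returning null, a failure in createRegion surfaces immediately as a test failure rather than as a downstream NullPointerException.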

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/test/java/org/apache/geode/cache/query/functional/FunctionJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/functional/FunctionJUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/query/functional/FunctionJUnitTest.java
index 8bb2610..b8451bd 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/functional/FunctionJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/functional/FunctionJUnitTest.java
@@ -80,14 +80,14 @@ public class FunctionJUnitTest {
     ExecutionContext context = null;
     for (int i = 0; i < 6; i++) {
       CompiledValue cf = new CompiledFunction(cvArr[i], OQLLexerTokenTypes.LITERAL_nvl);
-      StringBuffer clauseBuffer = new StringBuffer();
+      StringBuilder clauseBuffer = new StringBuilder();
       cf.generateCanonicalizedExpression(clauseBuffer, context);
       if (!clauseBuffer.toString().equals("NVL" + canonicalizedArgs[i])) {
         fail("Canonicalization not done properly");
       }
 
       cf = new CompiledFunction(cvArr[i], OQLLexerTokenTypes.LITERAL_element);
-      clauseBuffer = new StringBuffer();
+      clauseBuffer = new StringBuilder();
       cf.generateCanonicalizedExpression(clauseBuffer, context);
       if (!clauseBuffer.toString().equals("ELEMENT" + canonicalizedArgs[i])) {
         fail("Canonicalization not done properly");

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledAggregateFunctionJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledAggregateFunctionJUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledAggregateFunctionJUnitTest.java
index f3bcc02..38d3f6f 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledAggregateFunctionJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledAggregateFunctionJUnitTest.java
@@ -42,19 +42,20 @@ import org.apache.geode.cache.query.internal.aggregate.MaxMin;
 import org.apache.geode.cache.query.internal.aggregate.Sum;
 import org.apache.geode.cache.query.internal.aggregate.SumDistinct;
 import org.apache.geode.cache.query.internal.aggregate.SumDistinctPRQueryNode;
+import org.apache.geode.internal.cache.InternalCache;
 import org.apache.geode.test.junit.categories.UnitTest;
 
 @Category(UnitTest.class)
 public class CompiledAggregateFunctionJUnitTest {
 
   private Mockery context;
-  private Cache cache;
+  private InternalCache cache;
   private List bucketList;
 
   @Before
   public void setUp() throws Exception {
     context = new Mockery();
-    cache = context.mock(Cache.class);
+    cache = context.mock(InternalCache.class);
     bucketList = new ArrayList();
     bucketList.add(Integer.valueOf(1));
   }
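
With the field typed as InternalCache, any stubbing in the individual test methods is done against that interface. A minimal jMock sketch; the getDistributedSystem expectation is purely illustrative and not something this commit adds:

    // Illustrative only: stub the InternalCache mock before exercising the
    // CompiledAggregateFunction under test.
    context.checking(new Expectations() {
      {
        allowing(cache).getDistributedSystem();
        will(returnValue(null));
      }
    });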

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledJunctionInternalsJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledJunctionInternalsJUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledJunctionInternalsJUnitTest.java
index 21745d5..504d1fe 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledJunctionInternalsJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/internal/CompiledJunctionInternalsJUnitTest.java
@@ -84,7 +84,7 @@ public class CompiledJunctionInternalsJUnitTest {
     QCompiler compiler = new QCompiler();
     List list = compiler.compileFromClause("/portfolio p, p.positions");
     ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     try {
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -144,7 +144,7 @@ public class CompiledJunctionInternalsJUnitTest {
     QCompiler compiler = new QCompiler();
     List list = compiler.compileFromClause("/portfolio p, p.positions");
     ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     try {
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -242,7 +242,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       // qs.createIndex("statusIndex",
       // IndexType.FUNCTIONAL,"status","/portfolio");
@@ -331,7 +331,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio p");
       Iterator iter = list.iterator();
       while (iter.hasNext()) {
@@ -395,7 +395,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       // qs.createIndex("statusIndex",
       // IndexType.FUNCTIONAL,"status","/portfolio");
@@ -466,7 +466,7 @@ public class CompiledJunctionInternalsJUnitTest {
     QCompiler compiler = new QCompiler();
     List list = compiler.compileFromClause("/portfolio p, p.positions");
     ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     try {
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -512,7 +512,7 @@ public class CompiledJunctionInternalsJUnitTest {
     QCompiler compiler = new QCompiler();
     List list = compiler.compileFromClause("/portfolio p, p.positions");
     ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     try {
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -580,7 +580,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -636,7 +636,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       // qs.createIndex("statusIndex",
       // IndexType.FUNCTIONAL,"status","/portfolio");
@@ -705,7 +705,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -774,7 +774,7 @@ public class CompiledJunctionInternalsJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions,/employees e");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -844,7 +844,7 @@ public class CompiledJunctionInternalsJUnitTest {
       List list =
           compiler.compileFromClause("/portfolio p, p.positions,/employees e, /portfolio1 p1");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       qs.createIndex("statusIndex", IndexType.FUNCTIONAL, "status", "/portfolio");
       qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
@@ -945,7 +945,7 @@ public class CompiledJunctionInternalsJUnitTest {
       List list = compiler.compileFromClause(
           "/portfolio p, p.positions,/employees e, /employees1 e1, /portfolio p1");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
 
       // qs.createIndex("statusIndex",
       // IndexType.FUNCTIONAL,"status","/portfolio");
@@ -2861,7 +2861,7 @@ public class CompiledJunctionInternalsJUnitTest {
   private void bindIteratorsAndCreateIndex(ExecutionContext context) throws Exception {
     QCompiler compiler = new QCompiler();
     List list = compiler.compileFromClause("/portfolio p, p.positions");
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     qs.createIndex("idIndex", IndexType.FUNCTIONAL, "ID", "/portfolio");
     Iterator iter = list.iterator();
     while (iter.hasNext()) {

http://git-wip-us.apache.org/repos/asf/geode/blob/0d0bf253/geode-core/src/test/java/org/apache/geode/cache/query/internal/ExecutionContextJUnitTest.java
----------------------------------------------------------------------
diff --git a/geode-core/src/test/java/org/apache/geode/cache/query/internal/ExecutionContextJUnitTest.java b/geode-core/src/test/java/org/apache/geode/cache/query/internal/ExecutionContextJUnitTest.java
index a15bae0..1112ee2 100644
--- a/geode-core/src/test/java/org/apache/geode/cache/query/internal/ExecutionContextJUnitTest.java
+++ b/geode-core/src/test/java/org/apache/geode/cache/query/internal/ExecutionContextJUnitTest.java
@@ -72,7 +72,7 @@ public class ExecutionContextJUnitTest {
     QCompiler compiler = new QCompiler();
     List list = compiler.compileFromClause("/portfolio p, p.positions");
     ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     try {
       Iterator iter = list.iterator();
       while (iter.hasNext()) {
@@ -100,7 +100,7 @@ public class ExecutionContextJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
       Iterator iter = list.iterator();
       int i = 0;
       while (iter.hasNext()) {
@@ -128,7 +128,7 @@ public class ExecutionContextJUnitTest {
       QCompiler compiler = new QCompiler();
       List list = compiler.compileFromClause("/portfolio p, p.positions");
       ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-      context.newScope(context.assosciateScopeID());
+      context.newScope(context.associateScopeID());
       Iterator iter = list.iterator();
       int i = 0;
       CompiledIteratorDef iterDef = null;
@@ -143,7 +143,7 @@ public class ExecutionContextJUnitTest {
             rIter.getIndexInternalID().equals("index_iter" + i));
       }
       Set temp = new HashSet();
-      context.computeUtlimateDependencies(iterDef, temp);
+      context.computeUltimateDependencies(iterDef, temp);
       String regionPath = context
           .getRegionPathForIndependentRuntimeIterator((RuntimeIterator) temp.iterator().next());
       if (!(regionPath != null && regionPath.equals("/portfolio"))) {
@@ -166,7 +166,7 @@ public class ExecutionContextJUnitTest {
         "/portfolio p, p.positions, p.addreses addrs, addrs.collection1 coll1, /dummy d1, d1.collection2 d2");
     RuntimeIterator indItr = null;
     ExecutionContext context = new QueryExecutionContext(null, CacheUtils.getCache());
-    context.newScope(context.assosciateScopeID());
+    context.newScope(context.associateScopeID());
     int i = 0;
     List checkList = new ArrayList();
     try {