You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@trafodion.apache.org by sa...@apache.org on 2016/07/19 16:09:16 UTC

[03/13] incubator-trafodion git commit: TRAFODION-2016 support apache hbase1.1 & cdh5.5

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/pom.xml.cdh55
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/pom.xml.cdh55 b/core/sqf/src/seatrans/hbase-trx/pom.xml.cdh55
new file mode 100755
index 0000000..cf60e55
--- /dev/null
+++ b/core/sqf/src/seatrans/hbase-trx/pom.xml.cdh55
@@ -0,0 +1,332 @@
+<?xml version="1.0"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <!--
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+-->
+  <repositories>
+    <repository>
+      <id>cloudera</id>
+      <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
+    </repository>
+  </repositories>
+
+  <properties>
+    <hadoop.version>2.6.0</hadoop.version>
+    <hbase.version>1.0.0-cdh5.5.1</hbase.version>
+    <protobuf.version>2.5.0</protobuf.version>
+    <protocVersion>2.5.0</protocVersion>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <java.version>1.7</java.version>
+    <trx-suffix>cdh5_5</trx-suffix>
+  </properties>
+
+
+  <groupId>org.apache</groupId>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hbase-trx-cdh5_5</artifactId>
+  <version>${env.TRAFODION_VER}</version>
+  <name>HBase - Trx</name>
+  <description>Trx of HBase usage</description>
+
+
+  <build>
+    <defaultGoal>package</defaultGoal>
+      <!-- allow for different build targets with separate classes directories and
+           output jars, used for Java 6 compile for now -->
+      <outputDirectory>${project.build.directory}/classes_${trx-suffix}</outputDirectory>
+      <finalName>${project.artifactId}-${project.version}</finalName>
+      <!-- Some plugins (javadoc for example) can be used in the normal build- and the site phase.
+           These plugins inherit their options from the <reporting> section below. These settings
+           can be overwritten here. -->
+    <plugins>
+      <plugin>
+        <!--Make it so assembly:single does nothing in here-->
+        <artifactId>maven-assembly-plugin</artifactId>
+        <version>2.2-beta-5</version>
+        <configuration>
+          <skipAssembly>true</skipAssembly>
+        </configuration>
+      </plugin>
+        <plugin>
+            <artifactId>maven-surefire-plugin</artifactId>
+            <version>2.5</version>
+            <configuration>
+                <!-- Have to set the groups here because we only do
+    split tests in this package, so groups only live in this module -->
+                <groups>${surefire.firstPartGroups}</groups>
+            </configuration>
+        </plugin>
+        <!-- Make a jar and put the sources in the jar -->
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-source-plugin</artifactId>
+          <version>2.1.1</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-jar-plugin</artifactId>
+          <configuration>
+            <archive>
+               <manifestFile>hbase-trx.jar.versions</manifestFile>
+            </archive>
+          </configuration>
+        </plugin>
+    </plugins>
+  </build>
+
+
+
+  <dependencies>
+    <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-common</artifactId>
+        <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-protocol</artifactId>
+        <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-client</artifactId>
+        <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-thrift</artifactId>
+      <version>${hbase.version}</version>
+    </dependency>
+    <dependency>
+        <groupId>org.apache.hbase</groupId>
+        <artifactId>hbase-testing-util</artifactId>
+        <scope>test</scope>
+      <version>${hbase.version}</version>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.thrift</groupId>
+      <artifactId>libthrift</artifactId>
+      <version>0.9.1</version>
+    </dependency>
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+      <version>1.1.3</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.zookeeper</groupId>
+      <artifactId>zookeeper</artifactId>
+      <version>3.4.5</version>
+    </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>2.5.0</version>
+    </dependency>
+ </dependencies>
+ <profiles>
+     <!-- Skip the tests in this module -->
+     <profile>
+         <id>skipTrxTests</id>
+         <activation>
+             <property>
+                 <name>skipTrxTests</name>
+             </property>
+         </activation>
+         <properties>
+             <surefire.skipFirstPart>true</surefire.skipFirstPart>
+             <surefire.skipSecondPart>true</surefire.skipSecondPart>
+         </properties>
+     </profile>
+
+     <!-- Profiles for building against different hadoop versions -->
+     <!-- There are a lot of common dependencies used here, should investigate
+if we can combine these profiles somehow -->
+     <!-- profile against Hadoop 1.1.x: This is the default. It has to have the same
+  activation property as the parent Hadoop 1.1.x profile to make sure it gets run at
+  the same time. -->
+     <profile>
+         <id>hadoop-1.1</id>
+         <activation>
+             <property>
+            <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+            <!--h1--><name>hadoop.profile</name><value>1.1</value>
+             </property>
+         </activation>
+         <dependencies>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-core</artifactId>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-test</artifactId>
+             </dependency>
+         </dependencies>
+     </profile>
+     <!--
+       profile for building against Hadoop 2.0.0-alpha. Activate using:
+        mvn -Dhadoop.profile=2.0
+     -->
+     <profile>
+         <id>hadoop-2.0</id>
+         <activation>
+             <property>
+            <!--Below formatting for dev-support/generate-hadoopX-poms.sh-->
+            <!--h2--><name>!hadoop.profile</name>
+             </property>
+         </activation>
+         <dependencies>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-mapreduce-client-core</artifactId>
+                 <version>${hadoop.version}</version>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-common</artifactId>
+                 <version>${hadoop.version}</version>
+             </dependency>
+         </dependencies>
+         <build>
+             <plugins>
+                 <plugin>
+                     <artifactId>maven-dependency-plugin</artifactId>
+                     <executions>
+                         <execution>
+                             <id>create-mrapp-generated-classpath</id>
+                             <phase>generate-test-resources</phase>
+                             <goals>
+                                 <goal>build-classpath</goal>
+                             </goals>
+                             <configuration>
+                                 <!-- needed to run the unit test for DS to generate
+                                 the required classpath that is required in the env
+                                 of the launch container in the mini mr/yarn cluster
+                                 -->
+                                 <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
+                             </configuration>
+                         </execution>
+                     </executions>
+                 </plugin>
+             </plugins>
+         </build>
+     </profile>
+     <!--
+       profile for building against Hadoop 3.0.x. Activate using:
+        mvn -Dhadoop.profile=3.0
+     -->
+     <profile>
+         <id>hadoop-3.0</id>
+         <activation>
+             <property>
+                 <name>hadoop.profile</name>
+                 <value>3.0</value>
+             </property>
+         </activation>
+         <properties>
+             <hadoop.version>3.0-SNAPSHOT</hadoop.version>
+         </properties>
+         <dependencies>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-common</artifactId>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-annotations</artifactId>
+             </dependency>
+             <dependency>
+                 <groupId>org.apache.hadoop</groupId>
+                 <artifactId>hadoop-minicluster</artifactId>
+             </dependency>
+         </dependencies>
+         <build>
+             <plugins>
+                 <plugin>
+                     <artifactId>maven-dependency-plugin</artifactId>
+                     <executions>
+                         <execution>
+                             <id>create-mrapp-generated-classpath</id>
+                             <phase>generate-test-resources</phase>
+                             <goals>
+                                 <goal>build-classpath</goal>
+                             </goals>
+                             <configuration>
+                                 <!-- needed to run the unit test for DS to generate
+                                 the required classpath that is required in the env
+                                 of the launch container in the mini mr/yarn cluster
+                                 -->
+                                 <outputFile>${project.build.directory}/test-classes/mrapp-generated-classpath</outputFile>
+                             </configuration>
+                         </execution>
+                     </executions>
+                 </plugin>
+             </plugins>
+         </build>
+     </profile>
+    <profile>
+      <id>compile-protobuf</id>
+      <activation>
+        <property>
+          <name>compile-protobuf</name>
+        </property>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-maven-plugins</artifactId>
+            <executions>
+              <execution>
+                <id>compile-protoc</id>
+                <phase>generate-sources</phase>
+                <goals>
+                  <goal>protoc</goal>
+                </goals>
+                <configuration>
+                  <protobuf.version>2.5.0</protobuf.version>
+                  <protocVersion>2.5.0</protocVersion>
+                  <imports>
+                    <param>${basedir}/src/main/protobuf</param>
+                    <param>${basedir}/hbase-protocol/src/main/protobuf</param>
+                  </imports>
+                  <source>
+                    <directory>${basedir}/src/main/protobuf</directory>
+                    <includes>
+                      <include>TrxRegion.proto</include>
+                      <include>SsccRegion.proto</include>
+                    </includes>
+                  </source>
+                  <output>${basedir}/src/main/java/</output>
+                </configuration>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+  </profiles>
+</project>

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/pp.awk
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/pp.awk b/core/sqf/src/seatrans/hbase-trx/pp.awk
index 44d8b66..50fd32f 100644
--- a/core/sqf/src/seatrans/hbase-trx/pp.awk
+++ b/core/sqf/src/seatrans/hbase-trx/pp.awk
@@ -19,29 +19,35 @@
 # under the License.
 #
 # @@@ END COPYRIGHT @@@
-##  The following example has been tested to work.
+#  The following example has been tested to work.
 #  If the file f1 contains the following, run this script
 #  as follows:    
 #   awk -f pp.awk -v distro=HDP2.3 f1 > f2
+#
 #  contents of  file f1:
-#  #ifdef HDP2.3
-#  this is HDP2.3 specific code
-#  #else
-#  this is other than HDP2.3 code
-#  #endif
+#   #ifdef HDP2.3
+#   this is HDP2.3 specific code
+#   #else
+#   this is anything other than  HDP2.3 code
+#   #endif
 #
-#  #ifndef HDP2.3
-#  this is other than HDP2.3 code partof ifndef
-#  #else
-#  this is HDP2.3 code part of ifndef else
+#   #ifndef HDP2.3
+#   this is anything other than HDP2.3 code partof ifndef construct
+#   #else
+#   this is HDP2.3 code part of ifndef else construct
 #   #endif
 #
-#  this is common code
-#  this is common code 2
+#   this is common code for all cases
 #
-#  #ifdef CDH1.0
-#  this is CDH specific code
-#  #endif
+#   #ifdef CDH5.4
+#   this is CDH5.4 specific code
+#   #endif
+#
+#   #ifdef CDH5.5 HDP2.3 HBASE1.1
+#   this is common CDH5.5 or HDP2.3 or HBASE1.1
+#   #else
+#   this is anything other than CDH5.5 or HDP2.3 or HBASE1.1 code
+#   #endif
 
 BEGIN{
 printline=1      #print current line or not.
@@ -99,7 +105,7 @@ ifndefpattern = "#ifndef"
 
   if($0 ~ ifdefpattern)
    {
-     if( $2 ~ distro)
+     if( $2 ~ distro || $3 ~ distro || $4 ~ distro)
      {
        printline = 0
        matchBegun = 1
@@ -112,7 +118,7 @@ ifndefpattern = "#ifndef"
    }
  if($0 ~ ifndefpattern)
   {
-    if($2 ~ distro)
+    if($2 ~ distro || $3 ~ distro || $4 ~ distro)
     {
       printline = 0
       unmatchBegun = 1

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/ClientScanner98.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/ClientScanner98.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/ClientScanner98.java.tmpl
index 81cec3f..4d9fad9 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/ClientScanner98.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/ClientScanner98.java.tmpl
@@ -468,7 +468,7 @@ public class ClientScanner98 extends AbstractClientScanner {
       }
       closed = true;
     }
-#ifndef CDH1.0
+#ifndef CDH5.4 CDH5.5
     @Override
     public boolean renewLease() {
       if (callable != null) {

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/PatchClientScanner.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/PatchClientScanner.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/PatchClientScanner.java.tmpl
index 9e1f597..42c9c5e 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/PatchClientScanner.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/PatchClientScanner.java.tmpl
@@ -69,7 +69,7 @@ public class PatchClientScanner extends ClientScanner {
     super(conf, scan, tableName, connection, rpcFactory, controllerFactory, pool, primaryOperationTimeout);
   }
 
-#ifndef HDP2.3
+#ifdef CDH5.4 HBASE1.0
   @Override
   protected void loadCache() throws IOException {
 	    Result[] values = null;

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/TrafParallelClientScanner.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/TrafParallelClientScanner.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/TrafParallelClientScanner.java.tmpl
index c8c75ae..3ff9a81 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/TrafParallelClientScanner.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/client/TrafParallelClientScanner.java.tmpl
@@ -190,7 +190,7 @@ public class TrafParallelClientScanner extends AbstractClientScanner implements
     // interrupt all running threads, don't wait for completion
     pool.shutdownNow();
   }
-#ifndef CDH1.0
+#ifndef CDH5.4 CDH5.5
   @Override
   public boolean renewLease() {return false;}//fake, never needed.
 #endif

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/SsccRegionEndpoint.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/SsccRegionEndpoint.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/SsccRegionEndpoint.java.tmpl
index 2a9a748..dbeee79 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/SsccRegionEndpoint.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/SsccRegionEndpoint.java.tmpl
@@ -3420,7 +3420,7 @@ CoprocessorService, Coprocessor {
     if (LOG.isTraceEnabled()) LOG.trace("SsccRegionEndpoint coprocessor: start");
     RegionCoprocessorEnvironment tmp_env = 
       (RegionCoprocessorEnvironment)env;
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
     this.m_Region = (HRegion)
        tmp_env.getRegion();
 #else
@@ -3582,7 +3582,7 @@ CoprocessorService, Coprocessor {
 
     try {
           if (LOG.isTraceEnabled()) LOG.trace("SsccRegionEndpoint coprocessor:  Trafodion Recovery:  Flushing cache in startRegionAfterRecovery " + m_Region.getRegionInfo().getRegionNameAsString());
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
           m_Region.flush(true);
 #else
           m_Region.flushcache();

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionEndpoint.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionEndpoint.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionEndpoint.java.tmpl
index 4f7b43c..a450db7 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionEndpoint.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionEndpoint.java.tmpl
@@ -3490,7 +3490,7 @@ CoprocessorService, Coprocessor {
     if (LOG.isTraceEnabled()) LOG.trace("TrxRegionEndpoint coprocessor: start");
     RegionCoprocessorEnvironment tmp_env = 
       (RegionCoprocessorEnvironment)env;
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
     this.m_Region = (HRegion)
        tmp_env.getRegion();
 #else
@@ -4011,7 +4011,7 @@ CoprocessorService, Coprocessor {
 
     try {
           if (LOG.isTraceEnabled()) LOG.trace("TrxRegionEndpoint coprocessor: Trafodion Recovery:  Flushing cache in startRegionAfterRecovery " + m_Region.getRegionInfo().getRegionNameAsString());
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
           m_Region.flush(true);
 #else
           m_Region.flushcache();

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionObserver.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionObserver.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionObserver.java.tmpl
index 5c087af..55c91e9 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionObserver.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/coprocessor/transactional/TrxRegionObserver.java.tmpl
@@ -85,8 +85,10 @@ import org.apache.hadoop.hbase.client.Mutation;
 import java.util.ListIterator;
 import org.apache.hadoop.hbase.Cell;
 
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+#endif
+#ifdef HDP2.3 HBASE1.1
 import org.apache.hadoop.hbase.regionserver.Region;
 #endif
 
@@ -184,7 +186,7 @@ public void start(CoprocessorEnvironment e) throws IOException {
 
     RegionCoprocessorEnvironment regionCoprEnv = (RegionCoprocessorEnvironment)e;
     RegionCoprocessorEnvironment re = (RegionCoprocessorEnvironment) e;
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
     my_Region = (HRegion) re.getRegion();
 #else
     my_Region = re.getRegion();
@@ -696,7 +698,7 @@ public void createRecoveryzNode(int node, String encodedName, byte [] data) thro
     }
 
     @Override
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
     public void postSplit(ObserverContext<RegionCoprocessorEnvironment> e, Region l, Region r) {
 #else
     public void	postSplit(ObserverContext<RegionCoprocessorEnvironment> e, HRegion l, HRegion r) {
@@ -757,7 +759,7 @@ public void createRecoveryzNode(int node, String encodedName, byte [] data) thro
                 blockNonPhase2.set(true);
 
 	        if(LOG.isInfoEnabled()) {
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
                     HRegion region = (HRegion) c.getEnvironment().getRegion();
 #else
 	            HRegion region = c.getEnvironment().getRegion();
@@ -765,7 +767,7 @@ public void createRecoveryzNode(int node, String encodedName, byte [] data) thro
 	            LOG.debug("preClose -- setting close var to true on: " + region.getRegionInfo().getRegionNameAsString());
 	        }
 	        try {
-	          sbHelper.pendingAndScannersWait(commitPendingTransactions, scanners, transactionsById, pendingDelayLen);
+                 sbHelper.pendingAndScannersWait(commitPendingTransactions, scanners, transactionsById, pendingDelayLen);
 	        } catch(IOException ioe) {
 	          LOG.error("Encountered exception when calling pendingAndScannersWait(): " + ioe);
 	        }
@@ -867,7 +869,7 @@ public void createRecoveryzNode(int node, String encodedName, byte [] data) thro
             }
 
             @Override
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
             public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
               if (LOG.isTraceEnabled()) LOG.trace("preCompact: call next with scannerContext limit " + scannerContext);
 #else
@@ -878,7 +880,7 @@ public void createRecoveryzNode(int node, String encodedName, byte [] data) thro
                 boolean skip=false;
                 ConcurrentHashMap<String, Integer>  verCountByCol = new ConcurrentHashMap<String,Integer>();
                 try {
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
   		    ret = s.next(result,scannerContext);
 #else
                     ret = s.next(result,limit);

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/CleanOldTransactionsChore.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/CleanOldTransactionsChore.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/CleanOldTransactionsChore.java.tmpl
index 6e81a8a..9f1f316 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/CleanOldTransactionsChore.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/CleanOldTransactionsChore.java.tmpl
@@ -27,7 +27,7 @@ import java.io.IOException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
 import org.apache.hadoop.hbase.ScheduledChore;
 #else
 import org.apache.hadoop.hbase.Chore;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.coprocessor.transactional.TrxRegionEndpoint;
  * Cleans up committed transactions when they are no longer needed to verify
  * pending transactions.
  */
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
 public class CleanOldTransactionsChore extends ScheduledChore {
 #else
 public class CleanOldTransactionsChore extends Chore {
@@ -57,7 +57,7 @@ public class CleanOldTransactionsChore extends Chore {
   public CleanOldTransactionsChore(final TrxRegionEndpoint trx_Region,
                                    final int timer,  
                                    final Stoppable stoppable) {
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
     super("CleanOldTransactionsChore", stoppable, timer);
 #else
     super("CleanOldTransactionsChore", timer, stoppable);

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/KeyValueListScanner.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/KeyValueListScanner.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/KeyValueListScanner.java.tmpl
index 6ba4f99..ae04542 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/KeyValueListScanner.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/KeyValueListScanner.java.tmpl
@@ -158,7 +158,7 @@ public class KeyValueListScanner implements KeyValueScanner {
   public boolean backwardSeek(Cell seekKey) throws IOException {
     throw new NotImplementedException("Not implemented");
   }
-#ifndef CDH1.0
+#ifndef CDH5.4
   @Override
 #endif
   public Cell getNextIndexedKey() {

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/MemoryUsageChore.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/MemoryUsageChore.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/MemoryUsageChore.java.tmpl
index 07c66ef..656868a 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/MemoryUsageChore.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/MemoryUsageChore.java.tmpl
@@ -28,7 +28,7 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
 import org.apache.hadoop.hbase.ScheduledChore;
 #else
 import org.apache.hadoop.hbase.Chore;
@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.coprocessor.transactional.TrxRegionEndpoint;
 /**
  * Manages the MemoryMXBean to determine a regionserver's memory usage.
  */
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
 public class MemoryUsageChore extends ScheduledChore {
 #else
 public class MemoryUsageChore extends Chore {
@@ -57,7 +57,7 @@ public class MemoryUsageChore extends Chore {
   public MemoryUsageChore(final TrxRegionEndpoint trx_Region,
                           final int timer,  
                           final Stoppable stoppable) {
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
     super("MemoryUsageChore", stoppable, timer);
 #else
     super("MemoryUsageChore", timer, stoppable);

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/TrxTransactionState.java.tmpl
----------------------------------------------------------------------
diff --git a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/TrxTransactionState.java.tmpl b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/TrxTransactionState.java.tmpl
index 0eac088..7d301b3 100644
--- a/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/TrxTransactionState.java.tmpl
+++ b/core/sqf/src/seatrans/hbase-trx/src/main/java/org/apache/hadoop/hbase/regionserver/transactional/TrxTransactionState.java.tmpl
@@ -71,7 +71,7 @@ import org.apache.hadoop.hbase.regionserver.RegionCoprocessorHost;
 import org.apache.hadoop.hbase.regionserver.ScanQueryMatcher;
 import org.apache.hadoop.hbase.regionserver.ScanType;
 import org.apache.hadoop.hbase.regionserver.ScanInfo;
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
 #endif
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@@ -677,7 +677,7 @@ public class TrxTransactionState extends TransactionState {
             //Store.ScanInfo scaninfo = new Store.ScanInfo(null, 0, 1, HConstants.FOREVER, false, 0, Cell.COMPARATOR);
             ScanInfo scaninfo = new ScanInfo(null, 0, 1, HConstants.FOREVER,KeepDeletedCells.FALSE, 0, KeyValue.COMPARATOR);
            
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1
             //Hbase 1.1.2 and beyond has optimization in TimeRange.compare(cell) to return true for all time ranges if TimeRange.isAllTime() is true.
             //scan.setTimeRange(min, max) instantiates TimeRange with a different constructor (isAllTime is false in this case). This is set 
             //in scan object by the client if specific time range is needed. In all other cases, a default TimeRange() (isAllTime is true)
@@ -730,7 +730,7 @@ public class TrxTransactionState extends TransactionState {
          * @param limit
          * @return true if there are more rows, false if scanner is done
          */
-#ifndef HDP2.3
+#ifdef HBASE1.0 CDH5.4
         @Override
 #endif
         public synchronized boolean next(final List<Cell> outResult, final int limit) throws IOException {          	
@@ -796,13 +796,13 @@ public class TrxTransactionState extends TransactionState {
             return false;
         }
 
-#ifndef HDP2.3
+#ifndef HDP2.3 HBASE1.1
         @Override
 #endif
         public synchronized boolean next(final List<Cell> results) throws IOException {
           return next(results, -1);
         }
-#ifdef HDP2.3
+#ifdef HDP2.3 HBASE1.1 CDH5.5
         @Override
         public boolean next(List<Cell> results, ScannerContext scannerContext) throws IOException {
             return next(results, -1);

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/install/installer/traf_apache_mods
----------------------------------------------------------------------
diff --git a/install/installer/traf_apache_mods b/install/installer/traf_apache_mods
index cf16918..7acebf5 100755
--- a/install/installer/traf_apache_mods
+++ b/install/installer/traf_apache_mods
@@ -48,7 +48,11 @@ sudo chmod 777 $TRAF_CONFIG
 source $TRAF_CONFIG
 
 
-hbase_trx_jar="hbase-trx-apache*.jar"
+hbase_trx_jar="hbase-trx-apache1_1_2*.jar"
+
+if [[ $hbaseVersion =~ 1\.0 ]]; then
+   hbase_trx_jar="hbase-trx-apache1_0_2*.jar"
+fi
 
 traf_util_jar="trafodion-utility-*.jar"
 

http://git-wip-us.apache.org/repos/asf/incubator-trafodion/blob/6d08029a/install/installer/traf_config_check
----------------------------------------------------------------------
diff --git a/install/installer/traf_config_check b/install/installer/traf_config_check
index a8d8e49..3cadaca 100755
--- a/install/installer/traf_config_check
+++ b/install/installer/traf_config_check
@@ -678,6 +678,11 @@ do
          checkClouderaVersion
          checkRoleGroups
       fi
+
+      if [[ $HADOOP_TYPE == "apache" ]]; then
+         checkApacheVersion
+      fi
+   
       break;
    fi
 done
@@ -701,8 +706,8 @@ function checkClouderaVersion {
 
 if [[ $CDH_5_3_HDP_2_2_SUPPORT == "N" ]]; then
    #Check that Cloudera 5.2 or 5.3 are not installed.
-   if [[ "$CDH_5_4_SUPPORT" == "Y" ]]; then
-      nameOfVersion=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | sed 's/,.*//' | sed 's/.*\-//' | grep cdh5.4.*)
+   if [[ "$CDH_5_4_SUPPORT" == "Y" ]] || [[ "$CDH_5_5_SUPPORT" == "Y" ]]; then
+      nameOfVersion=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | sed 's/,.*//' | sed 's/.*\-//' | grep cdh5.[4-6].*)
       #Check that Cloudera 5.[n>4].* is not installed.
       if [[ -z $nameOfVersion ]]; then
          versionInstalled=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | sed 's/,.*//' | sed 's/.*\-//' | grep cdh[5-9].[7-9].* | wc -l)
@@ -740,8 +745,12 @@ else
    fi
 fi
 
+hbaseVersion=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | sed 's/-.*//' | awk {'print$2'})
+
 echo "***INFO: nameOfVersion=$nameOfVersion"
 sudo chmod 777 $TRAF_CONFIG
+sed -i '/hbaseVersion\=/d' $TRAF_CONFIG
+echo "export hbaseVersion=\"$hbaseVersion\"" >> $TRAF_CONFIG
 source $TRAF_CONFIG
 
 }
@@ -800,13 +809,44 @@ if [[ -z $nameOfVersion ]]; then
    fi
 fi
 
+hbaseVersion=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | sed 's/-.*//' | awk {'print$2'})
+
 echo "***INFO: nameOfVersion=$nameOfVersion"
 echo "***INFO: HADOOP_PATH=$HADOOP_PATH"
+
+sudo chmod 777 $TRAF_CONFIG
+sed -i '/hbaseVersion\=/d' $TRAF_CONFIG
+echo "export hbaseVersion=\"$hbaseVersion\"" >> $TRAF_CONFIG
 source $TRAF_CONFIG
 
 }
 
+function checkApacheVersion {
 
+if [[ $APACHE_1_0_X_SUPPORT == "Y" || $APACHE_1_1_X_SUPPORT == "Y" ]]; then
+   nameOfVersion=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | awk {'print $2'})
+   #Check that hbase 1.x.x is not installed.
+   if [[ -z $nameOfVersion ]]; then
+      versionInstalled=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | awk {'print $2'} | grep [1].[0-1].* | wc -l)
+      if [[ $versionInstalled -gt "0" ]]; then
+         errorFound=1
+         echo "HADOOP VERSION" >> $ERROR_LOG
+         echo "***ERROR: Trafodion and apache hbase versions may not be compatible" >> $ERROR_LOG
+         echo "***ERROR: Detected apache hbase version:" >> $ERROR_LOG
+         ssh -q -n $node cat $HOME/hbaseVersion.txt >> $ERROR_LOG
+      fi
+   fi
+fi
+
+hbaseVersion=$(ssh -q -n $node grep "Version" $HOME/hbaseVersion.txt | sed 's/-.*//' | awk {'print$2'}|sed s/,//g)
+
+echo "***INFO: nameOfVersion=$nameOfVersion"
+sudo chmod 777 $TRAF_CONFIG
+sed -i '/hbaseVersion\=/d' $TRAF_CONFIG
+echo "export hbaseVersion=\"$hbaseVersion\"" >> $TRAF_CONFIG
+source $TRAF_CONFIG
+
+}
 function checkHadoopNames {
 if [[ -z "$HDFS_USER" ]]; then
    errorFound=1
@@ -928,6 +968,7 @@ else
 
 fi
 
+install_features_path=$(tar -tf $TRAF_BUILD_PATH | grep "install_features")
 
 if [[ ! -z $install_features_path ]]; then
    if [[ "$ONE_TAR_INSTALL" == "N" ]]; then