Posted to common-commits@hadoop.apache.org by ro...@apache.org on 2018/02/18 08:09:50 UTC

[18/18] hadoop git commit: YARN-7919. Refactor timelineservice-hbase module into submodules. Contributed by Haibo Chen.

YARN-7919. Refactor timelineservice-hbase module into submodules. Contributed by Haibo Chen.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/9af30d46
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/9af30d46
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/9af30d46

Branch: refs/heads/trunk
Commit: 9af30d46c6e82332a8eda20cb3eb5f987e25e7a2
Parents: a1e56a6
Author: Rohith Sharma K S <ro...@apache.org>
Authored: Sat Feb 17 20:30:28 2018 +0530
Committer: Rohith Sharma K S <ro...@apache.org>
Committed: Sat Feb 17 20:30:28 2018 +0530

----------------------------------------------------------------------
 .../resources/assemblies/hadoop-yarn-dist.xml   |  22 +-
 hadoop-project/pom.xml                          |  14 +-
 .../pom.xml                                     |  26 +-
 ...stTimelineReaderWebServicesHBaseStorage.java |   8 +-
 .../storage/TestHBaseTimelineStorageApps.java   |  50 +-
 .../TestHBaseTimelineStorageEntities.java       |  89 +--
 .../storage/TestHBaseTimelineStorageSchema.java |  61 +-
 .../flow/TestHBaseStorageFlowActivity.java      |  24 +-
 .../storage/flow/TestHBaseStorageFlowRun.java   |  42 +-
 .../flow/TestHBaseStorageFlowRunCompaction.java |  58 +-
 .../pom.xml                                     | 219 ++++++
 .../reader/filter/TimelineFilterUtils.java      | 313 ++++++++
 .../reader/filter/package-info.java             |  28 +
 .../storage/HBaseTimelineReaderImpl.java        |  96 +++
 .../storage/HBaseTimelineWriterImpl.java        | 611 ++++++++++++++++
 .../storage/TimelineSchemaCreator.java          | 368 ++++++++++
 .../storage/application/ApplicationTableRW.java | 137 ++++
 .../storage/application/package-info.java       |  28 +
 .../storage/apptoflow/AppToFlowTableRW.java     |  92 +++
 .../storage/apptoflow/package-info.java         |  28 +
 .../storage/common/BaseTableRW.java             | 167 +++++
 .../storage/common/ColumnRWHelper.java          | 487 +++++++++++++
 .../common/HBaseTimelineStorageUtils.java       | 121 +++
 .../common/TimelineHBaseSchemaConstants.java    |  71 ++
 .../storage/common/TypedBufferedMutator.java    |  73 ++
 .../storage/common/package-info.java            |  28 +
 .../storage/entity/EntityTableRW.java           | 136 ++++
 .../storage/entity/package-info.java            |  28 +
 .../storage/flow/FlowActivityTableRW.java       |  91 +++
 .../storage/flow/FlowRunTableRW.java            | 102 +++
 .../storage/flow/package-info.java              |  29 +
 .../timelineservice/storage/package-info.java   |  28 +
 .../reader/AbstractTimelineStorageReader.java   | 159 ++++
 .../storage/reader/ApplicationEntityReader.java | 523 +++++++++++++
 .../storage/reader/EntityTypeReader.java        | 175 +++++
 .../reader/FlowActivityEntityReader.java        | 186 +++++
 .../storage/reader/FlowRunEntityReader.java     | 298 ++++++++
 .../storage/reader/GenericEntityReader.java     | 655 +++++++++++++++++
 .../reader/SubApplicationEntityReader.java      | 489 +++++++++++++
 .../storage/reader/TimelineEntityReader.java    | 464 ++++++++++++
 .../reader/TimelineEntityReaderFactory.java     | 105 +++
 .../storage/reader/package-info.java            |  28 +
 .../subapplication/SubApplicationTableRW.java   | 137 ++++
 .../storage/subapplication/package-info.java    |  28 +
 .../common/TestHBaseTimelineStorageUtils.java   |  33 +
 .../pom.xml                                     | 132 ++++
 .../storage/application/ApplicationColumn.java  | 101 +++
 .../application/ApplicationColumnFamily.java    |  65 ++
 .../application/ApplicationColumnPrefix.java    | 150 ++++
 .../storage/application/ApplicationRowKey.java  | 251 +++++++
 .../application/ApplicationRowKeyPrefix.java    |  69 ++
 .../storage/application/ApplicationTable.java   |  60 ++
 .../storage/application/package-info.java       |  28 +
 .../storage/apptoflow/AppToFlowColumn.java      |  95 +++
 .../apptoflow/AppToFlowColumnFamily.java        |  51 ++
 .../apptoflow/AppToFlowColumnPrefix.java        | 105 +++
 .../storage/apptoflow/AppToFlowRowKey.java      |  58 ++
 .../storage/apptoflow/AppToFlowTable.java       |  60 ++
 .../storage/apptoflow/package-info.java         |  28 +
 .../storage/common/AppIdKeyConverter.java       |  97 +++
 .../storage/common/BaseTable.java               |  27 +
 .../timelineservice/storage/common/Column.java  |  56 ++
 .../storage/common/ColumnFamily.java            |  34 +
 .../storage/common/ColumnHelper.java            | 101 +++
 .../storage/common/ColumnPrefix.java            |  71 ++
 .../storage/common/EventColumnName.java         |  63 ++
 .../common/EventColumnNameConverter.java        |  99 +++
 .../storage/common/GenericConverter.java        |  48 ++
 .../common/HBaseTimelineSchemaUtils.java        | 156 ++++
 .../storage/common/KeyConverter.java            |  41 ++
 .../storage/common/KeyConverterToString.java    |  38 +
 .../storage/common/LongConverter.java           |  94 +++
 .../storage/common/LongKeyConverter.java        |  68 ++
 .../storage/common/NumericValueConverter.java   |  39 +
 .../timelineservice/storage/common/Range.java   |  62 ++
 .../storage/common/RowKeyPrefix.java            |  42 ++
 .../storage/common/Separator.java               | 575 +++++++++++++++
 .../storage/common/StringKeyConverter.java      |  54 ++
 .../storage/common/TimestampGenerator.java      | 116 +++
 .../storage/common/ValueConverter.java          |  47 ++
 .../storage/common/package-info.java            |  28 +
 .../storage/entity/EntityColumn.java            | 105 +++
 .../storage/entity/EntityColumnFamily.java      |  65 ++
 .../storage/entity/EntityColumnPrefix.java      | 162 +++++
 .../storage/entity/EntityRowKey.java            | 299 ++++++++
 .../storage/entity/EntityRowKeyPrefix.java      |  77 ++
 .../storage/entity/EntityTable.java             |  61 ++
 .../storage/entity/package-info.java            |  28 +
 .../flow/AggregationCompactionDimension.java    |  63 ++
 .../storage/flow/AggregationOperation.java      |  94 +++
 .../timelineservice/storage/flow/Attribute.java |  39 +
 .../storage/flow/FlowActivityColumnFamily.java  |  55 ++
 .../storage/flow/FlowActivityColumnPrefix.java  | 133 ++++
 .../storage/flow/FlowActivityRowKey.java        | 247 +++++++
 .../storage/flow/FlowActivityRowKeyPrefix.java  |  60 ++
 .../storage/flow/FlowActivityTable.java         |  45 ++
 .../storage/flow/FlowRunColumn.java             | 112 +++
 .../storage/flow/FlowRunColumnFamily.java       |  54 ++
 .../storage/flow/FlowRunColumnPrefix.java       | 129 ++++
 .../storage/flow/FlowRunRowKey.java             | 233 ++++++
 .../storage/flow/FlowRunRowKeyPrefix.java       |  54 ++
 .../storage/flow/FlowRunTable.java              |  77 ++
 .../storage/flow/package-info.java              |  29 +
 .../timelineservice/storage/package-info.java   |  28 +
 .../subapplication/SubApplicationColumn.java    |  99 +++
 .../SubApplicationColumnFamily.java             |  68 ++
 .../SubApplicationColumnPrefix.java             | 163 +++++
 .../subapplication/SubApplicationRowKey.java    | 290 ++++++++
 .../SubApplicationRowKeyPrefix.java             |  69 ++
 .../subapplication/SubApplicationTable.java     |  64 ++
 .../storage/subapplication/package-info.java    |  28 +
 .../TestCustomApplicationIdConversion.java      |  39 +
 .../storage/common/TestKeyConverters.java       | 134 ++++
 .../storage/common/TestRowKeys.java             | 276 +++++++
 .../storage/common/TestRowKeysAsString.java     | 144 ++++
 .../storage/common/TestSeparator.java           | 215 ++++++
 .../pom.xml                                     | 161 ++++
 .../src/assembly/coprocessor.xml                |  37 +
 .../common/HBaseTimelineServerUtils.java        | 135 ++++
 .../storage/common/package-info.java            |  28 +
 .../storage/flow/FlowRunCoprocessor.java        | 277 +++++++
 .../storage/flow/FlowScanner.java               | 723 ++++++++++++++++++
 .../storage/flow/FlowScannerOperation.java      |  46 ++
 .../storage/flow/package-info.java              |  29 +
 .../timelineservice/storage/package-info.java   |  28 +
 .../pom.xml                                     | 193 +----
 .../reader/filter/TimelineFilterUtils.java      | 308 --------
 .../reader/filter/package-info.java             |  28 -
 .../storage/HBaseTimelineReaderImpl.java        |  96 ---
 .../storage/HBaseTimelineWriterImpl.java        | 593 ---------------
 .../storage/TimelineSchemaCreator.java          | 367 ----------
 .../storage/application/ApplicationColumn.java  | 108 ---
 .../application/ApplicationColumnFamily.java    |  65 --
 .../application/ApplicationColumnPrefix.java    | 236 ------
 .../storage/application/ApplicationRowKey.java  | 251 -------
 .../application/ApplicationRowKeyPrefix.java    |  69 --
 .../storage/application/ApplicationTable.java   | 170 -----
 .../storage/application/package-info.java       |  28 -
 .../storage/apptoflow/AppToFlowColumn.java      | 101 ---
 .../apptoflow/AppToFlowColumnFamily.java        |  51 --
 .../apptoflow/AppToFlowColumnPrefix.java        | 206 ------
 .../storage/apptoflow/AppToFlowRowKey.java      |  58 --
 .../storage/apptoflow/AppToFlowTable.java       | 125 ----
 .../storage/apptoflow/package-info.java         |  28 -
 .../storage/common/AppIdKeyConverter.java       |  97 ---
 .../storage/common/BaseTable.java               | 167 -----
 .../common/BufferedMutatorDelegator.java        |  73 --
 .../timelineservice/storage/common/Column.java  |  80 --
 .../storage/common/ColumnFamily.java            |  34 -
 .../storage/common/ColumnHelper.java            | 414 -----------
 .../storage/common/ColumnPrefix.java            | 145 ----
 .../storage/common/EventColumnName.java         |  63 --
 .../common/EventColumnNameConverter.java        |  99 ---
 .../storage/common/GenericConverter.java        |  48 --
 .../common/HBaseTimelineStorageUtils.java       | 354 ---------
 .../storage/common/KeyConverter.java            |  41 --
 .../storage/common/KeyConverterToString.java    |  38 -
 .../storage/common/LongConverter.java           |  94 ---
 .../storage/common/LongKeyConverter.java        |  68 --
 .../storage/common/NumericValueConverter.java   |  39 -
 .../timelineservice/storage/common/Range.java   |  62 --
 .../storage/common/RowKeyPrefix.java            |  42 --
 .../storage/common/Separator.java               | 575 ---------------
 .../storage/common/StringKeyConverter.java      |  54 --
 .../common/TimelineHBaseSchemaConstants.java    |  71 --
 .../storage/common/TimestampGenerator.java      | 116 ---
 .../storage/common/TypedBufferedMutator.java    |  28 -
 .../storage/common/ValueConverter.java          |  47 --
 .../storage/common/package-info.java            |  28 -
 .../storage/entity/EntityColumn.java            | 112 ---
 .../storage/entity/EntityColumnFamily.java      |  65 --
 .../storage/entity/EntityColumnPrefix.java      | 249 -------
 .../storage/entity/EntityRowKey.java            | 299 --------
 .../storage/entity/EntityRowKeyPrefix.java      |  77 --
 .../storage/entity/EntityTable.java             | 170 -----
 .../storage/entity/package-info.java            |  28 -
 .../flow/AggregationCompactionDimension.java    |  63 --
 .../storage/flow/AggregationOperation.java      |  94 ---
 .../timelineservice/storage/flow/Attribute.java |  39 -
 .../storage/flow/FlowActivityColumnFamily.java  |  55 --
 .../storage/flow/FlowActivityColumnPrefix.java  | 221 ------
 .../storage/flow/FlowActivityRowKey.java        | 247 -------
 .../storage/flow/FlowActivityRowKeyPrefix.java  |  60 --
 .../storage/flow/FlowActivityTable.java         | 109 ---
 .../storage/flow/FlowRunColumn.java             | 131 ----
 .../storage/flow/FlowRunColumnFamily.java       |  54 --
 .../storage/flow/FlowRunColumnPrefix.java       | 217 ------
 .../storage/flow/FlowRunCoprocessor.java        | 277 -------
 .../storage/flow/FlowRunRowKey.java             | 233 ------
 .../storage/flow/FlowRunRowKeyPrefix.java       |  54 --
 .../storage/flow/FlowRunTable.java              | 151 ----
 .../storage/flow/FlowScanner.java               | 729 -------------------
 .../storage/flow/FlowScannerOperation.java      |  46 --
 .../storage/flow/package-info.java              |  29 -
 .../timelineservice/storage/package-info.java   |  28 -
 .../reader/AbstractTimelineStorageReader.java   | 158 ----
 .../storage/reader/ApplicationEntityReader.java | 520 -------------
 .../storage/reader/EntityTypeReader.java        | 175 -----
 .../reader/FlowActivityEntityReader.java        | 185 -----
 .../storage/reader/FlowRunEntityReader.java     | 294 --------
 .../storage/reader/GenericEntityReader.java     | 651 -----------------
 .../reader/SubApplicationEntityReader.java      | 488 -------------
 .../storage/reader/TimelineEntityReader.java    | 459 ------------
 .../reader/TimelineEntityReaderFactory.java     | 105 ---
 .../storage/reader/package-info.java            |  28 -
 .../subapplication/SubApplicationColumn.java    | 108 ---
 .../SubApplicationColumnFamily.java             |  68 --
 .../SubApplicationColumnPrefix.java             | 250 -------
 .../subapplication/SubApplicationRowKey.java    | 290 --------
 .../SubApplicationRowKeyPrefix.java             |  69 --
 .../subapplication/SubApplicationTable.java     | 174 -----
 .../storage/subapplication/package-info.java    |  28 -
 .../TestCustomApplicationIdConversion.java      |  39 -
 .../common/TestHBaseTimelineStorageUtils.java   |  33 -
 .../storage/common/TestKeyConverters.java       | 134 ----
 .../storage/common/TestRowKeys.java             | 276 -------
 .../storage/common/TestRowKeysAsString.java     | 144 ----
 .../storage/common/TestSeparator.java           | 215 ------
 218 files changed, 15320 insertions(+), 14832 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
----------------------------------------------------------------------
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
index a77dd20..2c266b6 100644
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
@@ -213,7 +213,11 @@
     </fileSet>
     <!-- Copy dependecies from hadoop-yarn-server-timelineservice as well -->
     <fileSet>
-      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/target/lib</directory>
+      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/target/lib</directory>
+      <outputDirectory>share/hadoop/${hadoop.component}/timelineservice/lib</outputDirectory>
+    </fileSet>
+    <fileSet>
+      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-common/target/lib</directory>
       <outputDirectory>share/hadoop/${hadoop.component}/timelineservice/lib</outputDirectory>
     </fileSet>
   </fileSets>
@@ -221,12 +225,26 @@
     <moduleSet>
       <includes>
         <include>org.apache.hadoop:hadoop-yarn-server-timelineservice</include>
-        <include>org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase</include>
+        <include>org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-client</include>
+        <include>org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-common</include>
+      </includes>
+      <binaries>
+        <outputDirectory>share/hadoop/${hadoop.component}/timelineservice</outputDirectory>
+        <includeDependencies>false</includeDependencies>
+        <unpack>false</unpack>
+      </binaries>
+    </moduleSet>
+    <moduleSet>
+      <includes>
+        <include>org.apache.hadoop:hadoop-yarn-server-timelineservice-hbase-server</include>
       </includes>
       <binaries>
         <outputDirectory>share/hadoop/${hadoop.component}/timelineservice</outputDirectory>
         <includeDependencies>false</includeDependencies>
+        <!-- This is the id of the timelineservice-hbase-coprocessor assembly descriptor -->
+        <attachmentClassifier>coprocessor</attachmentClassifier>
         <unpack>false</unpack>
+        <outputFileNameMapping>hadoop-yarn-server-timelineservice-hbase-coprocessor-${module.version}.${module.extension}</outputFileNameMapping>
       </binaries>
     </moduleSet>
     <moduleSet>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-project/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
index c27596c..ce51c99 100644
--- a/hadoop-project/pom.xml
+++ b/hadoop-project/pom.xml
@@ -397,7 +397,19 @@
 
       <dependency>
         <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-yarn-server-timelineservice-hbase</artifactId>
+        <artifactId>hadoop-yarn-server-timelineservice-hbase-client</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-timelineservice-hbase-common</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-server-timelineservice-hbase-server</artifactId>
         <version>${project.version}</version>
       </dependency>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
index f36897b..d9f992d 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/pom.xml
@@ -60,7 +60,31 @@
 
     <dependency>
       <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-yarn-server-timelineservice-hbase</artifactId>
+      <artifactId>hadoop-yarn-server-timelineservice-hbase-client</artifactId>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-timelineservice-hbase-common</artifactId>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-common</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-timelineservice-hbase-server</artifactId>
       <scope>test</scope>
       <exclusions>
         <exclusion>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
index b2029ca..33d8dcd 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/reader/TestTimelineReaderWebServicesHBaseStorage.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineMetric.Type;
 import org.apache.hadoop.yarn.server.metrics.ApplicationMetricsConstants;
 import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils;
 import org.apache.hadoop.yarn.server.utils.BuilderUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -70,7 +70,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
     extends AbstractTimelineReaderHBaseTestBase {
   private static long ts = System.currentTimeMillis();
   private static long dayTs =
-      HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(ts);
+      HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(ts);
   private static String doAsUser = "remoteuser";
 
   @BeforeClass
@@ -371,7 +371,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
             BuilderUtils.newApplicationId(timestamp, count++);
         ApplicationEntity appEntity = new ApplicationEntity();
         appEntity.setId(
-            HBaseTimelineStorageUtils.convertApplicationIdToString(appId));
+            HBaseTimelineSchemaUtils.convertApplicationIdToString(appId));
         appEntity.setCreatedTime(timestamp);
 
         TimelineEvent created = new TimelineEvent();
@@ -929,7 +929,7 @@ public class TestTimelineReaderWebServicesHBaseStorage
           new String[] {"flow1"});
 
       long firstFlowActivity =
-          HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(1425016501000L);
+          HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(1425016501000L);
 
       DateFormat fmt = TimelineReaderWebServices.DATE_FORMAT.get();
       uri = URI.create("http://localhost:" + getServerPort() + "/ws/v2/" +

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
index 111008a..bc33427 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageApps.java
@@ -68,10 +68,11 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Fiel
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationRowKey;
-import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;
@@ -173,7 +174,7 @@ public class TestHBaseTimelineStorageApps {
       scan.setStartRow(Bytes.toBytes(cluster));
       scan.setStopRow(Bytes.toBytes(cluster + "1"));
       Connection conn = ConnectionFactory.createConnection(c1);
-      ResultScanner resultScanner = new ApplicationTable()
+      ResultScanner resultScanner = new ApplicationTableRW()
           .getResultScanner(c1, conn, scan);
 
       assertTrue(resultScanner != null);
@@ -308,7 +309,7 @@ public class TestHBaseTimelineStorageApps {
       Get get = new Get(rowKey);
       get.setMaxVersions(Integer.MAX_VALUE);
       Connection conn = ConnectionFactory.createConnection(c1);
-      Result result = new ApplicationTable().getResult(c1, conn, get);
+      Result result = new ApplicationTableRW().getResult(c1, conn, get);
 
       assertTrue(result != null);
       assertEquals(17, result.size());
@@ -319,24 +320,24 @@ public class TestHBaseTimelineStorageApps {
           appId));
 
       // check info column family
-      String id1 = ApplicationColumn.ID.readResult(result).toString();
+      String id1 =
+          ColumnRWHelper.readResult(result, ApplicationColumn.ID).toString();
       assertEquals(appId, id1);
 
-      Long cTime1 =
-          (Long) ApplicationColumn.CREATED_TIME.readResult(result);
+      Long cTime1 = (Long)
+          ColumnRWHelper.readResult(result, ApplicationColumn.CREATED_TIME);
       assertEquals(cTime, cTime1);
 
-      Map<String, Object> infoColumns =
-          ApplicationColumnPrefix.INFO.readResults(result,
-              new StringKeyConverter());
+      Map<String, Object> infoColumns = ColumnRWHelper.readResults(
+          result, ApplicationColumnPrefix.INFO, new StringKeyConverter());
       assertEquals(infoMap, infoColumns);
 
       // Remember isRelatedTo is of type Map<String, Set<String>>
       for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo
           .entrySet()) {
-        Object isRelatedToValue =
-            ApplicationColumnPrefix.IS_RELATED_TO.readResult(result,
-                isRelatedToEntry.getKey());
+        Object isRelatedToValue = ColumnRWHelper.readResult(
+            result, ApplicationColumnPrefix.IS_RELATED_TO,
+            isRelatedToEntry.getKey());
         String compoundValue = isRelatedToValue.toString();
         // id7?id9?id6
         Set<String> isRelatedToValues =
@@ -351,9 +352,9 @@ public class TestHBaseTimelineStorageApps {
       // RelatesTo
       for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo
           .entrySet()) {
-        String compoundValue =
-            ApplicationColumnPrefix.RELATES_TO.readResult(result,
-                relatesToEntry.getKey()).toString();
+        String compoundValue = ColumnRWHelper.readResult(result,
+            ApplicationColumnPrefix.RELATES_TO, relatesToEntry.getKey())
+            .toString();
         // id3?id4?id5
         Set<String> relatesToValues =
             new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
@@ -366,14 +367,13 @@ public class TestHBaseTimelineStorageApps {
 
       KeyConverter<String> stringKeyConverter = new StringKeyConverter();
       // Configuration
-      Map<String, Object> configColumns =
-          ApplicationColumnPrefix.CONFIG
-              .readResults(result, stringKeyConverter);
+      Map<String, Object> configColumns = ColumnRWHelper.readResults(
+          result, ApplicationColumnPrefix.CONFIG, stringKeyConverter);
       assertEquals(conf, configColumns);
 
       NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
-          ApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result,
-              stringKeyConverter);
+          ColumnRWHelper.readResultsWithTimestamps(
+              result, ApplicationColumnPrefix.METRIC, stringKeyConverter);
 
       NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
       matchMetrics(metricValues, metricMap);
@@ -500,7 +500,7 @@ public class TestHBaseTimelineStorageApps {
     event.addInfo(expKey, expVal);
 
     final TimelineEntity entity = new ApplicationEntity();
-    entity.setId(HBaseTimelineStorageUtils.convertApplicationIdToString(
+    entity.setId(HBaseTimelineSchemaUtils.convertApplicationIdToString(
         ApplicationId.newInstance(0, 1)));
     entity.addEvent(event);
 
@@ -531,7 +531,7 @@ public class TestHBaseTimelineStorageApps {
       Get get = new Get(rowKey);
       get.setMaxVersions(Integer.MAX_VALUE);
       Connection conn = ConnectionFactory.createConnection(c1);
-      Result result = new ApplicationTable().getResult(c1, conn, get);
+      Result result = new ApplicationTableRW().getResult(c1, conn, get);
 
       assertTrue(result != null);
 
@@ -541,8 +541,8 @@ public class TestHBaseTimelineStorageApps {
           appName));
 
       Map<EventColumnName, Object> eventsResult =
-          ApplicationColumnPrefix.EVENT.readResults(result,
-              new EventColumnNameConverter());
+          ColumnRWHelper.readResults(result,
+              ApplicationColumnPrefix.EVENT, new EventColumnNameConverter());
       // there should be only one event
       assertEquals(1, eventsResult.size());
       for (Map.Entry<EventColumnName, Object> e : eventsResult.entrySet()) {
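
Note on the pattern above (not part of the commit itself): the column enums
such as ApplicationColumn and ApplicationColumnPrefix no longer expose
readResult()/readResults() directly; reads now go through the new
ColumnRWHelper class in the timelineservice-hbase-client submodule. Below is
a minimal sketch of the new call shape, assuming the types and signatures
exactly as they appear in the diff; the wrapper class and method are
illustrative only, not part of the change.

import java.io.IOException;
import java.util.Map;
import java.util.NavigableMap;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumn;
import org.apache.hadoop.yarn.server.timelineservice.storage.application.ApplicationColumnPrefix;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;

/** Illustrative only: reads one application row the post-refactor way. */
public final class ApplicationReadExample {
  private ApplicationReadExample() {
  }

  static void readApplicationRow(Result result) throws IOException {
    // Pre-refactor: ApplicationColumn.ID.readResult(result).
    // Post-refactor: the helper takes the column as an argument.
    String id =
        ColumnRWHelper.readResult(result, ApplicationColumn.ID).toString();
    Long createdTime = (Long)
        ColumnRWHelper.readResult(result, ApplicationColumn.CREATED_TIME);

    // Prefixed columns are read the same way, with a key converter.
    Map<String, Object> info = ColumnRWHelper.readResults(
        result, ApplicationColumnPrefix.INFO, new StringKeyConverter());

    // Metric cells keep their timestamps.
    NavigableMap<String, NavigableMap<Long, Number>> metrics =
        ColumnRWHelper.readResultsWithTimestamps(
            result, ApplicationColumnPrefix.METRIC, new StringKeyConverter());

    System.out.println(id + " created at " + createdTime
        + ", info keys: " + info.keySet()
        + ", metric ids: " + metrics.keySet());
  }
}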

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
index 5e08999..90a6959 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageEntities.java
@@ -62,9 +62,10 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyVa
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelineKeyValuesFilter;
 import org.apache.hadoop.yarn.server.timelineservice.reader.filter.TimelinePrefixFilter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnRWHelper;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnName;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.EventColumnNameConverter;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.KeyConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.Separator;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.StringKeyConverter;
@@ -73,12 +74,12 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumn
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKey;
 import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
 import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumn;
 import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationColumnPrefix;
 import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKey;
 import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationRowKeyPrefix;
-import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.subapplication.SubApplicationTableRW;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -208,7 +209,7 @@ public class TestHBaseTimelineStorageEntities {
       String flow = "some_flow_name";
       String flowVersion = "AB7822C10F1111";
       long runid = 1002345678919L;
-      String appName = HBaseTimelineStorageUtils.convertApplicationIdToString(
+      String appName = HBaseTimelineSchemaUtils.convertApplicationIdToString(
           ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1)
       );
       hbi.write(new TimelineCollectorContext(cluster, user, flow, flowVersion,
@@ -224,7 +225,7 @@ public class TestHBaseTimelineStorageEntities {
       s.setStartRow(startRow);
       s.setMaxVersions(Integer.MAX_VALUE);
       Connection conn = ConnectionFactory.createConnection(c1);
-      ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
+      ResultScanner scanner = new EntityTableRW().getResultScanner(c1, conn, s);
 
       int rowCount = 0;
       int colCount = 0;
@@ -238,26 +239,27 @@ public class TestHBaseTimelineStorageEntities {
               entity));
 
           // check info column family
-          String id1 = EntityColumn.ID.readResult(result).toString();
+          String id1 =
+              ColumnRWHelper.readResult(result, EntityColumn.ID).toString();
           assertEquals(id, id1);
 
-          String type1 = EntityColumn.TYPE.readResult(result).toString();
+          String type1 =
+              ColumnRWHelper.readResult(result, EntityColumn.TYPE).toString();
           assertEquals(type, type1);
 
-          Long cTime1 = (Long) EntityColumn.CREATED_TIME.readResult(result);
+          Long cTime1 = (Long)
+              ColumnRWHelper.readResult(result, EntityColumn.CREATED_TIME);
           assertEquals(cTime1, cTime);
 
-          Map<String, Object> infoColumns =
-              EntityColumnPrefix.INFO.readResults(result,
-                  new StringKeyConverter());
+          Map<String, Object> infoColumns = ColumnRWHelper.readResults(
+              result, EntityColumnPrefix.INFO, new StringKeyConverter());
           assertEquals(infoMap, infoColumns);
 
           // Remember isRelatedTo is of type Map<String, Set<String>>
           for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo
               .entrySet()) {
-            Object isRelatedToValue =
-                EntityColumnPrefix.IS_RELATED_TO.readResult(result,
-                    isRelatedToEntry.getKey());
+            Object isRelatedToValue = ColumnRWHelper.readResult(result,
+                EntityColumnPrefix.IS_RELATED_TO, isRelatedToEntry.getKey());
             String compoundValue = isRelatedToValue.toString();
             // id7?id9?id6
             Set<String> isRelatedToValues =
@@ -273,8 +275,9 @@ public class TestHBaseTimelineStorageEntities {
           // RelatesTo
           for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo
               .entrySet()) {
-            String compoundValue = EntityColumnPrefix.RELATES_TO
-                .readResult(result, relatesToEntry.getKey()).toString();
+            String compoundValue = ColumnRWHelper.readResult(result,
+                EntityColumnPrefix.RELATES_TO, relatesToEntry.getKey())
+                .toString();
             // id3?id4?id5
             Set<String> relatesToValues =
                 new HashSet<String>(
@@ -287,13 +290,13 @@ public class TestHBaseTimelineStorageEntities {
           }
 
           // Configuration
-          Map<String, Object> configColumns =
-              EntityColumnPrefix.CONFIG.readResults(result, stringKeyConverter);
+          Map<String, Object> configColumns = ColumnRWHelper.readResults(
+              result, EntityColumnPrefix.CONFIG, stringKeyConverter);
           assertEquals(conf, configColumns);
 
           NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
-              EntityColumnPrefix.METRIC.readResultsWithTimestamps(result,
-                  stringKeyConverter);
+              ColumnRWHelper.readResultsWithTimestamps(
+                  result, EntityColumnPrefix.METRIC, stringKeyConverter);
 
           NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
           matchMetrics(metricValues, metricMap);
@@ -386,14 +389,14 @@ public class TestHBaseTimelineStorageEntities {
       Set<TimelineMetric> metrics, Long cTime, TimelineMetric m1)
       throws IOException {
     Scan s = new Scan();
-    // read from SubApplicationTable
+    // read from SubApplicationTableRW
     byte[] startRow = new SubApplicationRowKeyPrefix(cluster, subAppUser, null,
         null, null, null).getRowKeyPrefix();
     s.setStartRow(startRow);
     s.setMaxVersions(Integer.MAX_VALUE);
     Connection conn = ConnectionFactory.createConnection(c1);
     ResultScanner scanner =
-        new SubApplicationTable().getResultScanner(c1, conn, s);
+        new SubApplicationTableRW().getResultScanner(c1, conn, s);
 
     int rowCount = 0;
     int colCount = 0;
@@ -407,25 +410,28 @@ public class TestHBaseTimelineStorageEntities {
             user, entity));
 
         // check info column family
-        String id1 = SubApplicationColumn.ID.readResult(result).toString();
+        String id1 = ColumnRWHelper.readResult(result, SubApplicationColumn.ID)
+            .toString();
         assertEquals(id, id1);
 
-        String type1 = SubApplicationColumn.TYPE.readResult(result).toString();
+        String type1 = ColumnRWHelper.readResult(result,
+            SubApplicationColumn.TYPE).toString();
         assertEquals(type, type1);
 
-        Long cTime1 =
-            (Long) SubApplicationColumn.CREATED_TIME.readResult(result);
+        Long cTime1 = (Long) ColumnRWHelper.readResult(result,
+            SubApplicationColumn.CREATED_TIME);
         assertEquals(cTime1, cTime);
 
-        Map<String, Object> infoColumns = SubApplicationColumnPrefix.INFO
-            .readResults(result, new StringKeyConverter());
+        Map<String, Object> infoColumns = ColumnRWHelper.readResults(
+            result, SubApplicationColumnPrefix.INFO, new StringKeyConverter());
         assertEquals(infoMap, infoColumns);
 
         // Remember isRelatedTo is of type Map<String, Set<String>>
         for (Map.Entry<String, Set<String>> isRelatedToEntry : isRelatedTo
             .entrySet()) {
-          Object isRelatedToValue = SubApplicationColumnPrefix.IS_RELATED_TO
-              .readResult(result, isRelatedToEntry.getKey());
+          Object isRelatedToValue = ColumnRWHelper.readResult(
+              result, SubApplicationColumnPrefix.IS_RELATED_TO,
+              isRelatedToEntry.getKey());
           String compoundValue = isRelatedToValue.toString();
           // id7?id9?id6
           Set<String> isRelatedToValues =
@@ -440,8 +446,9 @@ public class TestHBaseTimelineStorageEntities {
         // RelatesTo
         for (Map.Entry<String, Set<String>> relatesToEntry : relatesTo
             .entrySet()) {
-          String compoundValue = SubApplicationColumnPrefix.RELATES_TO
-              .readResult(result, relatesToEntry.getKey()).toString();
+          String compoundValue = ColumnRWHelper.readResult(result,
+              SubApplicationColumnPrefix.RELATES_TO, relatesToEntry.getKey())
+              .toString();
           // id3?id4?id5
           Set<String> relatesToValues =
               new HashSet<String>(Separator.VALUES.splitEncoded(compoundValue));
@@ -453,13 +460,13 @@ public class TestHBaseTimelineStorageEntities {
         }
 
         // Configuration
-        Map<String, Object> configColumns = SubApplicationColumnPrefix.CONFIG
-            .readResults(result, stringKeyConverter);
+        Map<String, Object> configColumns = ColumnRWHelper.readResults(
+            result, SubApplicationColumnPrefix.CONFIG, stringKeyConverter);
         assertEquals(conf, configColumns);
 
         NavigableMap<String, NavigableMap<Long, Number>> metricsResult =
-            SubApplicationColumnPrefix.METRIC.readResultsWithTimestamps(result,
-                stringKeyConverter);
+            ColumnRWHelper.readResultsWithTimestamps(
+                result, SubApplicationColumnPrefix.METRIC, stringKeyConverter);
 
         NavigableMap<Long, Number> metricMap = metricsResult.get(m1.getId());
         matchMetrics(metricValues, metricMap);
@@ -511,7 +518,7 @@ public class TestHBaseTimelineStorageEntities {
       String flow = "other_flow_name";
       String flowVersion = "1111F01C2287BA";
       long runid = 1009876543218L;
-      String appName = HBaseTimelineStorageUtils.convertApplicationIdToString(
+      String appName = HBaseTimelineSchemaUtils.convertApplicationIdToString(
           ApplicationId.newInstance(System.currentTimeMillis() + 9000000L, 1));
       byte[] startRow =
           new EntityRowKeyPrefix(cluster, user, flow, runid, appName)
@@ -525,7 +532,7 @@ public class TestHBaseTimelineStorageEntities {
       s.setStartRow(startRow);
       s.addFamily(EntityColumnFamily.INFO.getBytes());
       Connection conn = ConnectionFactory.createConnection(c1);
-      ResultScanner scanner = new EntityTable().getResultScanner(c1, conn, s);
+      ResultScanner scanner = new EntityTableRW().getResultScanner(c1, conn, s);
 
       int rowCount = 0;
       for (Result result : scanner) {
@@ -538,8 +545,8 @@ public class TestHBaseTimelineStorageEntities {
               entity));
 
           Map<EventColumnName, Object> eventsResult =
-              EntityColumnPrefix.EVENT.readResults(result,
-                  new EventColumnNameConverter());
+              ColumnRWHelper.readResults(result,
+                  EntityColumnPrefix.EVENT, new EventColumnNameConverter());
           // there should be only one event
           assertEquals(1, eventsResult.size());
           for (Map.Entry<EventColumnName, Object> e : eventsResult.entrySet()) {
@@ -604,7 +611,7 @@ public class TestHBaseTimelineStorageEntities {
 
     final TimelineEntity entity = new ApplicationEntity();
     entity.setId(
-        HBaseTimelineStorageUtils.convertApplicationIdToString(
+        HBaseTimelineSchemaUtils.convertApplicationIdToString(
             ApplicationId.newInstance(0, 1)));
     entity.addEvent(event);
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
index 0dcd171..f838178 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/TestHBaseTimelineStorageSchema.java
@@ -21,6 +21,9 @@ package org.apache.hadoop.yarn.server.timelineservice.storage;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
 
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
+import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTableRW;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -35,10 +38,6 @@ import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Table;
 
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
-import org.apache.hadoop.yarn.server.timelineservice.storage.flow.FlowRunTable;
-
 /**
  * Unit tests for checking different schema prefixes.
  */
@@ -61,22 +60,24 @@ public class TestHBaseTimelineStorageSchema {
     conn = ConnectionFactory.createConnection(hbaseConf);
     Admin admin = conn.getAdmin();
 
-    TableName entityTableName = BaseTable.getTableName(hbaseConf,
-        EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME);
+    TableName entityTableName = BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(entityTableName));
     assertTrue(entityTableName.getNameAsString().startsWith(
         YarnConfiguration.DEFAULT_TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX));
-    Table entityTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME));
+    Table entityTable = conn.getTable(BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME));
     assertNotNull(entityTable);
 
-    TableName flowRunTableName = BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME);
+    TableName flowRunTableName = BaseTableRW.getTableName(hbaseConf,
+        FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(flowRunTableName));
     assertTrue(flowRunTableName.getNameAsString().startsWith(
         YarnConfiguration.DEFAULT_TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX));
-    Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table flowRunTable = conn.getTable(
+        BaseTableRW.getTableName(hbaseConf,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     assertNotNull(flowRunTable);
   }
 
@@ -91,20 +92,22 @@ public class TestHBaseTimelineStorageSchema {
     conn = ConnectionFactory.createConnection(hbaseConf);
     Admin admin = conn.getAdmin();
 
-    TableName entityTableName = BaseTable.getTableName(hbaseConf,
-        EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME);
+    TableName entityTableName = BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(entityTableName));
     assertTrue(entityTableName.getNameAsString().startsWith(prefix));
-    Table entityTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME));
+    Table entityTable = conn.getTable(BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME));
     assertNotNull(entityTable);
 
-    TableName flowRunTableName = BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME);
+    TableName flowRunTableName = BaseTableRW.getTableName(hbaseConf,
+        FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(flowRunTableName));
     assertTrue(flowRunTableName.getNameAsString().startsWith(prefix));
-    Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table flowRunTable = conn.getTable(
+        BaseTableRW.getTableName(hbaseConf,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     assertNotNull(flowRunTable);
 
     // create another set with a diff prefix
@@ -114,20 +117,22 @@ public class TestHBaseTimelineStorageSchema {
     hbaseConf.set(YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME,
         prefix);
     DataGeneratorForTest.createSchema(hbaseConf);
-    entityTableName = BaseTable.getTableName(hbaseConf,
-        EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME);
+    entityTableName = BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(entityTableName));
     assertTrue(entityTableName.getNameAsString().startsWith(prefix));
-    entityTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        EntityTable.TABLE_NAME_CONF_NAME, EntityTable.DEFAULT_TABLE_NAME));
+    entityTable = conn.getTable(BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME));
     assertNotNull(entityTable);
 
-    flowRunTableName = BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME);
+    flowRunTableName = BaseTableRW.getTableName(hbaseConf,
+        FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME);
     assertTrue(admin.tableExists(flowRunTableName));
     assertTrue(flowRunTableName.getNameAsString().startsWith(prefix));
-    flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    flowRunTable = conn.getTable(
+        BaseTableRW.getTableName(hbaseConf,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     assertNotNull(flowRunTable);
     hbaseConf
     .unset(YarnConfiguration.TIMELINE_SERVICE_HBASE_SCHEMA_PREFIX_NAME);
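
Note on the renames above (not part of the commit itself): the client-side
table access classes were split out as *TableRW variants in the
timelineservice-hbase-client submodule, while the schema-only *Table classes
stay in the new -common submodule. Below is a minimal sketch of resolving and
opening the entity table through the new classes, assuming the names and
signatures shown in the diff; the wrapper class and method are illustrative
only.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;

/** Illustrative only: looks up the (optionally prefixed) entity table. */
public final class EntityTableLookupExample {
  private EntityTableLookupExample() {
  }

  static Table openEntityTable(Configuration hbaseConf, Connection conn)
      throws IOException {
    // The table name is still resolved from the configuration (including any
    // schema prefix), but via BaseTableRW instead of the old BaseTable.
    TableName entityTableName = BaseTableRW.getTableName(hbaseConf,
        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME);
    // The Connection is created by the caller, e.g. with
    // ConnectionFactory.createConnection(hbaseConf) as in the tests above.
    return conn.getTable(entityTableName);
  }
}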

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
index 4bf221e..645b7d5 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowActivity.java
@@ -52,9 +52,9 @@ import org.apache.hadoop.yarn.server.timelineservice.reader.TimelineReaderContex
 import org.apache.hadoop.yarn.server.timelineservice.storage.DataGeneratorForTest;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineSchemaUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -161,8 +161,8 @@ public class TestHBaseStorageFlowActivity {
     Connection conn = ConnectionFactory.createConnection(c1);
     // check in flow activity table
     Table table1 = conn.getTable(
-        BaseTable.getTableName(c1, FlowActivityTable.TABLE_NAME_CONF_NAME,
-            FlowActivityTable.DEFAULT_TABLE_NAME));
+        BaseTableRW.getTableName(c1, FlowActivityTableRW.TABLE_NAME_CONF_NAME,
+            FlowActivityTableRW.DEFAULT_TABLE_NAME));
     byte[] startRow =
         new FlowActivityRowKey(cluster, minStartTs, user, flow).getRowKey();
     Get g = new Get(startRow);
@@ -178,7 +178,7 @@ public class TestHBaseStorageFlowActivity {
     assertEquals(cluster, flowActivityRowKey.getClusterId());
     assertEquals(user, flowActivityRowKey.getUserId());
     assertEquals(flow, flowActivityRowKey.getFlowName());
-    Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(minStartTs);
+    Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(minStartTs);
     assertEquals(dayTs, flowActivityRowKey.getDayTimestamp());
     assertEquals(1, values.size());
     checkFlowActivityRunId(runid, flowVersion, values);
@@ -292,8 +292,8 @@ public class TestHBaseStorageFlowActivity {
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
     Table table1 = conn.getTable(
-        BaseTable.getTableName(c1, FlowActivityTable.TABLE_NAME_CONF_NAME,
-            FlowActivityTable.DEFAULT_TABLE_NAME));
+        BaseTableRW.getTableName(c1, FlowActivityTableRW.TABLE_NAME_CONF_NAME,
+            FlowActivityTableRW.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table1.getScanner(s);
     int rowCount = 0;
     for (Result result : scanner) {
@@ -309,7 +309,7 @@ public class TestHBaseStorageFlowActivity {
       assertEquals(cluster, flowActivityRowKey.getClusterId());
       assertEquals(user, flowActivityRowKey.getUserId());
       assertEquals(flow, flowActivityRowKey.getFlowName());
-      Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(
+      Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(
           appCreatedTime);
       assertEquals(dayTs, flowActivityRowKey.getDayTimestamp());
       assertEquals(1, values.size());
@@ -401,7 +401,7 @@ public class TestHBaseStorageFlowActivity {
         assertEquals(user, flowActivity.getUser());
         assertEquals(flow, flowActivity.getFlowName());
         long dayTs =
-            HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(appCreatedTime);
+            HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(appCreatedTime);
         assertEquals(dayTs, flowActivity.getDate().getTime());
         Set<FlowRunEntity> flowRuns = flowActivity.getFlowRuns();
         assertEquals(3, flowRuns.size());
@@ -442,8 +442,8 @@ public class TestHBaseStorageFlowActivity {
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
     Table table1 = conn.getTable(
-        BaseTable.getTableName(c1, FlowActivityTable.TABLE_NAME_CONF_NAME,
-            FlowActivityTable.DEFAULT_TABLE_NAME));
+        BaseTableRW.getTableName(c1, FlowActivityTableRW.TABLE_NAME_CONF_NAME,
+            FlowActivityTableRW.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table1.getScanner(s);
     int rowCount = 0;
     for (Result result : scanner) {
@@ -456,7 +456,7 @@ public class TestHBaseStorageFlowActivity {
       assertEquals(cluster, flowActivityRowKey.getClusterId());
       assertEquals(user, flowActivityRowKey.getUserId());
       assertEquals(flow, flowActivityRowKey.getFlowName());
-      Long dayTs = HBaseTimelineStorageUtils.getTopOfTheDayTimestamp(
+      Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(
           appCreatedTime);
       assertEquals(dayTs, flowActivityRowKey.getDayTimestamp());
 

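The FlowActivity hunks above are mechanical renames: table names are now resolved through the reader/writer-side BaseTableRW class, and the day-bucketing arithmetic moves to the schema-side HBaseTimelineSchemaUtils. A minimal sketch of the read-back pattern the updated test relies on, assuming the imports added in this diff plus the standard HBase client classes; parseRowKey and the row-key constructor arguments come from surrounding test code that is not shown here and are assumptions:

  // Resolve the flow activity table name from the configuration, falling
  // back to the default carried by FlowActivityTableRW.
  Connection conn = ConnectionFactory.createConnection(conf);
  Table flowActivityTable = conn.getTable(
      BaseTableRW.getTableName(conf,
          FlowActivityTableRW.TABLE_NAME_CONF_NAME,
          FlowActivityTableRW.DEFAULT_TABLE_NAME));

  // The row key layout is unchanged; only the helper that computes the
  // top-of-day bucket moved out of the old HBaseTimelineStorageUtils.
  byte[] startRow =
      new FlowActivityRowKey(cluster, minStartTs, user, flow).getRowKey();
  Result result = flowActivityTable.get(new Get(startRow));
  Long dayTs = HBaseTimelineSchemaUtils.getTopOfTheDayTimestamp(minStartTs);
  FlowActivityRowKey parsed = FlowActivityRowKey.parseRowKey(result.getRow());
  assertEquals(dayTs, parsed.getDayTimestamp());
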
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
index 1ad02e1..622b0eb 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRun.java
@@ -62,9 +62,9 @@ import org.apache.hadoop.yarn.server.timelineservice.storage.DataGeneratorForTes
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineReaderImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
 import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader.Field;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
-import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.entity.EntityTableRW;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -91,8 +91,8 @@ public class TestHBaseStorageFlowRun {
   @Test
   public void checkCoProcessorOff() throws IOException, InterruptedException {
     Configuration hbaseConf = util.getConfiguration();
-    TableName table = BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME);
+    TableName table = BaseTableRW.getTableName(hbaseConf,
+        FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME);
     Connection conn = null;
     conn = ConnectionFactory.createConnection(hbaseConf);
     Admin admin = conn.getAdmin();
@@ -106,9 +106,9 @@ public class TestHBaseStorageFlowRun {
       checkCoprocessorExists(table, true);
     }
 
-    table = BaseTable.getTableName(hbaseConf,
-        FlowActivityTable.TABLE_NAME_CONF_NAME,
-        FlowActivityTable.DEFAULT_TABLE_NAME);
+    table = BaseTableRW.getTableName(hbaseConf,
+        FlowActivityTableRW.TABLE_NAME_CONF_NAME,
+        FlowActivityTableRW.DEFAULT_TABLE_NAME);
     if (admin.tableExists(table)) {
       // check the regions.
       // check in flow activity table
@@ -116,8 +116,8 @@ public class TestHBaseStorageFlowRun {
       checkCoprocessorExists(table, false);
     }
 
-    table = BaseTable.getTableName(hbaseConf, EntityTable.TABLE_NAME_CONF_NAME,
-        EntityTable.DEFAULT_TABLE_NAME);
+    table = BaseTableRW.getTableName(hbaseConf,
+        EntityTableRW.TABLE_NAME_CONF_NAME, EntityTableRW.DEFAULT_TABLE_NAME);
     if (admin.tableExists(table)) {
       // check the regions.
       // check in entity run table
@@ -224,8 +224,10 @@ public class TestHBaseStorageFlowRun {
 
     Connection conn = ConnectionFactory.createConnection(c1);
     // check in flow run table
-    Table table1 = conn.getTable(BaseTable.getTableName(c1,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table table1 = conn.getTable(
+        BaseTableRW.getTableName(c1,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     // scan the table and see that we get back the right min and max
     // timestamps
     byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();
@@ -380,8 +382,10 @@ public class TestHBaseStorageFlowRun {
         .getRowKey();
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
-    Table table1 = conn.getTable(BaseTable.getTableName(c1,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table table1 = conn.getTable(
+        BaseTableRW.getTableName(c1,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table1.getScanner(s);
 
     int loopCount = 0;
@@ -525,8 +529,10 @@ public class TestHBaseStorageFlowRun {
         new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey();
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
-    Table table1 = conn.getTable(BaseTable.getTableName(c1,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table table1 = conn.getTable(
+        BaseTableRW.getTableName(c1,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table1.getScanner(s);
 
     int rowCount = 0;
@@ -810,8 +816,10 @@ public class TestHBaseStorageFlowRun {
       boolean checkMax) throws IOException {
     Connection conn = ConnectionFactory.createConnection(c1);
     // check in flow run table
-    Table table1 = conn.getTable(BaseTable.getTableName(c1,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table table1 = conn.getTable(
+        BaseTableRW.getTableName(c1,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     // scan the table and see that we get back the right min and max
     // timestamps
     byte[] startRow = new FlowRunRowKey(cluster, user, flow, runid).getRowKey();

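The FlowRun hunks follow the same pattern; the call worth noting is the static BaseTableRW.getTableName(conf, confKey, defaultName), the same three-argument lookup the tests previously made on BaseTable, now hosted on the reader/writer base class. A sketch of the coprocessor check the first hunk updates, assuming the test's hbaseConf and its checkCoprocessorExists helper (the Connection/Admin calls are plain HBase client API):

  Connection conn = ConnectionFactory.createConnection(hbaseConf);
  Admin admin = conn.getAdmin();

  // Only the flow run table is expected to carry the coprocessor.
  TableName flowRunTable = BaseTableRW.getTableName(hbaseConf,
      FlowRunTableRW.TABLE_NAME_CONF_NAME, FlowRunTableRW.DEFAULT_TABLE_NAME);
  if (admin.tableExists(flowRunTable)) {
    checkCoprocessorExists(flowRunTable, true);
  }

  // The flow activity and entity tables must not have it.
  TableName flowActivityTable = BaseTableRW.getTableName(hbaseConf,
      FlowActivityTableRW.TABLE_NAME_CONF_NAME,
      FlowActivityTableRW.DEFAULT_TABLE_NAME);
  if (admin.tableExists(flowActivityTable)) {
    checkCoprocessorExists(flowActivityTable, false);
  }
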
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
index 0ef8260..31be285 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase-tests/src/test/java/org/apache/hadoop/yarn/server/timelineservice/storage/flow/TestHBaseStorageFlowRunCompaction.java
@@ -54,9 +54,9 @@ import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
 import org.apache.hadoop.yarn.server.timelineservice.collector.TimelineCollectorContext;
 import org.apache.hadoop.yarn.server.timelineservice.storage.DataGeneratorForTest;
 import org.apache.hadoop.yarn.server.timelineservice.storage.HBaseTimelineWriterImpl;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTable;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.BaseTableRW;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.ColumnHelper;
-import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineStorageUtils;
+import org.apache.hadoop.yarn.server.timelineservice.storage.common.HBaseTimelineServerUtils;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.LongConverter;
 import org.apache.hadoop.yarn.server.timelineservice.storage.common.TimestampGenerator;
 import org.junit.AfterClass;
@@ -107,8 +107,10 @@ public class TestHBaseStorageFlowRunCompaction {
     Configuration hbaseConf = util.getConfiguration();
     Connection conn = null;
     conn = ConnectionFactory.createConnection(hbaseConf);
-    Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table flowRunTable = conn.getTable(
+        BaseTableRW.getTableName(hbaseConf,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     flowRunTable.put(p);
 
     Get g = new Get(rowKeyBytes);
@@ -156,8 +158,10 @@ public class TestHBaseStorageFlowRunCompaction {
     Configuration hbaseConf = util.getConfiguration();
     Connection conn = null;
     conn = ConnectionFactory.createConnection(hbaseConf);
-    Table flowRunTable = conn.getTable(BaseTable.getTableName(hbaseConf,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table flowRunTable = conn.getTable(
+        BaseTableRW.getTableName(hbaseConf,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     flowRunTable.put(p);
 
     String rowKey2 = "nonNumericRowKey2";
@@ -324,10 +328,12 @@ public class TestHBaseStorageFlowRunCompaction {
 
     // check in flow run table
     HRegionServer server = util.getRSForFirstRegionInTable(
-        BaseTable.getTableName(c1, FlowRunTable.TABLE_NAME_CONF_NAME,
-            FlowRunTable.DEFAULT_TABLE_NAME));
-    List<Region> regions = server.getOnlineRegions(BaseTable.getTableName(c1,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+        BaseTableRW.getTableName(c1, FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
+    List<Region> regions = server.getOnlineRegions(
+        BaseTableRW.getTableName(c1,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     assertTrue("Didn't find any regions for primary table!",
         regions.size() > 0);
     // flush and compact all the regions of the primary table
@@ -352,8 +358,10 @@ public class TestHBaseStorageFlowRunCompaction {
         new FlowRunRowKey(clusterStop, user, flow, runid).getRowKey();
     s.setStopRow(stopRow);
     Connection conn = ConnectionFactory.createConnection(c1);
-    Table table1 = conn.getTable(BaseTable.getTableName(c1,
-        FlowRunTable.TABLE_NAME_CONF_NAME, FlowRunTable.DEFAULT_TABLE_NAME));
+    Table table1 = conn.getTable(
+        BaseTableRW.getTableName(c1,
+            FlowRunTableRW.TABLE_NAME_CONF_NAME,
+            FlowRunTableRW.DEFAULT_TABLE_NAME));
     ResultScanner scanner = table1.getScanner(s);
 
     int rowCount = 0;
@@ -420,7 +428,7 @@ public class TestHBaseStorageFlowRunCompaction {
     tags.add(t);
     byte[] tagByteArray = Tag.fromList(tags);
     // create a cell with a VERY old timestamp and attribute SUM_FINAL
-    Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, cell1Ts, Bytes.toBytes(cellValue1), tagByteArray);
     currentColumnCells.add(c1);
 
@@ -430,7 +438,7 @@ public class TestHBaseStorageFlowRunCompaction {
     tags.add(t);
     tagByteArray = Tag.fromList(tags);
     // create a cell with a recent timestamp and attribute SUM_FINAL
-    Cell c2 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c2 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, cell2Ts, Bytes.toBytes(cellValue2), tagByteArray);
     currentColumnCells.add(c2);
 
@@ -440,7 +448,7 @@ public class TestHBaseStorageFlowRunCompaction {
     tags.add(t);
     tagByteArray = Tag.fromList(tags);
     // create a cell with a VERY old timestamp but has attribute SUM
-    Cell c3 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c3 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, cell3Ts, Bytes.toBytes(cellValue3), tagByteArray);
     currentColumnCells.add(c3);
 
@@ -450,7 +458,7 @@ public class TestHBaseStorageFlowRunCompaction {
     tags.add(t);
     tagByteArray = Tag.fromList(tags);
     // create a cell with a VERY old timestamp but has attribute SUM
-    Cell c4 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c4 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, cell4Ts, Bytes.toBytes(cellValue4), tagByteArray);
     currentColumnCells.add(c4);
 
@@ -520,7 +528,7 @@ public class TestHBaseStorageFlowRunCompaction {
       tags.add(t);
       byte[] tagByteArray = Tag.fromList(tags);
       // create a cell with a VERY old timestamp and attribute SUM_FINAL
-      c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier,
+      c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier,
           cellTsFinal, Bytes.toBytes(cellValueFinal), tagByteArray);
       currentColumnCells.add(c1);
       cellTsFinal++;
@@ -534,7 +542,7 @@ public class TestHBaseStorageFlowRunCompaction {
       tags.add(t);
       byte[] tagByteArray = Tag.fromList(tags);
       // create a cell with attribute SUM
-      c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier,
+      c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier,
           cellTsNotFinal, Bytes.toBytes(cellValueNotFinal), tagByteArray);
       currentColumnCells.add(c1);
       cellTsNotFinal++;
@@ -611,7 +619,7 @@ public class TestHBaseStorageFlowRunCompaction {
       tags.add(t);
       byte[] tagByteArray = Tag.fromList(tags);
       // create a cell with a VERY old timestamp and attribute SUM_FINAL
-      c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier,
+      c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier,
           cellTsFinal, Bytes.toBytes(cellValueFinal), tagByteArray);
       currentColumnCells.add(c1);
       cellTsFinal++;
@@ -625,7 +633,7 @@ public class TestHBaseStorageFlowRunCompaction {
       tags.add(t);
       byte[] tagByteArray = Tag.fromList(tags);
       // create a cell with a VERY old timestamp and attribute SUM_FINAL
-      c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier,
+      c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier,
           cellTsFinalNotExpire, Bytes.toBytes(cellValueFinal), tagByteArray);
       currentColumnCells.add(c1);
       cellTsFinalNotExpire++;
@@ -639,7 +647,7 @@ public class TestHBaseStorageFlowRunCompaction {
       tags.add(t);
       byte[] tagByteArray = Tag.fromList(tags);
       // create a cell with attribute SUM
-      c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily, aQualifier,
+      c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily, aQualifier,
           cellTsNotFinal, Bytes.toBytes(cellValueNotFinal), tagByteArray);
       currentColumnCells.add(c1);
       cellTsNotFinal++;
@@ -696,7 +704,7 @@ public class TestHBaseStorageFlowRunCompaction {
     SortedSet<Cell> currentColumnCells = new TreeSet<Cell>(KeyValue.COMPARATOR);
 
     // create a cell with a VERY old timestamp and attribute SUM_FINAL
-    Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, 120L, Bytes.toBytes(cellValue1), tagByteArray);
     currentColumnCells.add(c1);
 
@@ -707,7 +715,7 @@ public class TestHBaseStorageFlowRunCompaction {
     tagByteArray = Tag.fromList(tags);
 
     // create a cell with a VERY old timestamp but has attribute SUM
-    Cell c2 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c2 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, 130L, Bytes.toBytes(cellValue2), tagByteArray);
     currentColumnCells.add(c2);
     List<Cell> cells = fs.processSummationMajorCompaction(currentColumnCells,
@@ -754,7 +762,7 @@ public class TestHBaseStorageFlowRunCompaction {
     SortedSet<Cell> currentColumnCells = new TreeSet<Cell>(KeyValue.COMPARATOR);
 
     // create a cell with a VERY old timestamp
-    Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, 120L, Bytes.toBytes(1110L), tagByteArray);
     currentColumnCells.add(c1);
 
@@ -792,7 +800,7 @@ public class TestHBaseStorageFlowRunCompaction {
 
     SortedSet<Cell> currentColumnCells = new TreeSet<Cell>(KeyValue.COMPARATOR);
 
-    Cell c1 = HBaseTimelineStorageUtils.createNewCell(aRowKey, aFamily,
+    Cell c1 = HBaseTimelineServerUtils.createNewCell(aRowKey, aFamily,
         aQualifier, currentTimestamp, Bytes.toBytes(1110L), tagByteArray);
     currentColumnCells.add(c1);
     List<Cell> cells = fs.processSummationMajorCompaction(currentColumnCells,

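In the compaction tests, cell construction switches to HBaseTimelineServerUtils, the server-side (coprocessor) utility class; the createNewCell argument list is unchanged by the split. A sketch of how a tagged cell is built and run through the compaction path; the Tag/AggregationOperation wiring, the FlowScanner variable fs, and the converter/timestamp arguments come from unshown parts of the test and are assumptions here:

  // Tag the cell as a final sum so that major compaction may collapse it.
  List<Tag> tags = new ArrayList<>();
  tags.add(new Tag(AggregationOperation.SUM_FINAL.getTagType(),
      "application_1234567890_0001"));
  byte[] tagByteArray = Tag.fromList(tags);

  SortedSet<Cell> currentColumnCells = new TreeSet<Cell>(KeyValue.COMPARATOR);
  currentColumnCells.add(HBaseTimelineServerUtils.createNewCell(
      aRowKey, aFamily, aQualifier, 120L, Bytes.toBytes(1110L),
      tagByteArray));

  // fs is the FlowScanner under test; SUM_FINAL cells older than the
  // compaction timestamp are summed away, SUM cells are kept as-is.
  List<Cell> cells = fs.processSummationMajorCompaction(
      currentColumnCells, new LongConverter(), currentTimestamp);
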
http://git-wip-us.apache.org/repos/asf/hadoop/blob/9af30d46/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
new file mode 100644
index 0000000..a1db497
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-timelineservice-hbase/hadoop-yarn-server-timelineservice-hbase-client/pom.xml
@@ -0,0 +1,219 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+                             http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <parent>
+    <artifactId>hadoop-yarn-server-timelineservice-hbase</artifactId>
+    <groupId>org.apache.hadoop</groupId>
+    <version>3.2.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hadoop-yarn-server-timelineservice-hbase-client</artifactId>
+  <name>Apache Hadoop YARN TimelineService HBase Client</name>
+
+  <properties>
+    <!-- Needed for generating FindBugs warnings using parent pom -->
+    <yarn.basedir>${project.parent.parent.parent.basedir}</yarn.basedir>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-timelineservice-hbase-common</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-logging</groupId>
+      <artifactId>commons-logging</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-lang</groupId>
+      <artifactId>commons-lang</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>commons-cli</groupId>
+      <artifactId>commons-cli</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>com.google.guava</groupId>
+      <artifactId>guava</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-annotations</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <!-- 'mvn dependency:analyze' fails to detect use of this dependency -->
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-api</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-common</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-applicationhistoryservice</artifactId>
+      <scope>provided</scope>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-timelineservice</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-common</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-client</artifactId>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-server</artifactId>
+      <scope>provided</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-client</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-mapreduce-client-core</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-util</artifactId>
+        </exclusion>
+        <exclusion>
+          <groupId>org.mortbay.jetty</groupId>
+          <artifactId>jetty-sslengine</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-jar-plugin</artifactId>
+        <executions>
+          <execution>
+            <goals>
+              <goal>test-jar</goal>
+            </goals>
+            <phase>test-compile</phase>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <additionalDependencies>
+            <additionnalDependency>
+              <groupId>junit</groupId>
+              <artifactId>junit</artifactId>
+              <version>4.11</version>
+            </additionnalDependency>
+          </additionalDependencies>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>copy-dependencies</goal>
+            </goals>
+            <configuration>
+              <includeScope>runtime</includeScope>
+              <excludeGroupIds>org.slf4j,org.apache.hadoop,com.github.stephenc.findbugs</excludeGroupIds>
+              <outputDirectory>${project.build.directory}/lib</outputDirectory>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

