You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lens.apache.org by am...@apache.org on 2015/05/21 12:49:13 UTC
[1/2] incubator-lens git commit: LENS-562 : Add cli command for
getting timeline of all facts or single fact or single fact+storage pair.
(Rajat Khandelwal via amareshwari)
Repository: incubator-lens
Updated Branches:
refs/heads/master 1d82fd76d -> e0b0c4c55
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/src/site/apt/user/cli.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/user/cli.apt b/src/site/apt/user/cli.apt
index bd39497..8efb58b 100644
--- a/src/site/apt/user/cli.apt
+++ b/src/site/apt/user/cli.apt
@@ -86,21 +86,21 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|set \<key\>=\<value\>|Assign <<<value>>> to session parameter specified with <<<key>>> on lens server|
+|add file [--path] \<path-to-file-on-server-side\>|Adds a file resource to session|
*--+--+
|add jar [--path] \<path-to-jar-on-server-side\>|Adds jar resource to the session|
*--+--+
-|remove jar [--path] \<path-to-jar-on-server-side\>|Removes a jar resource from session|
-*--+--+
-|add file [--path] \<path-to-file-on-server-side\>|Adds a file resource to session|
+|close/bye|Releases all resources of the server session and exits the shell|
*--+--+
-|remove file [--path] \<path-to-file-on-server-side\>|removes a file resource from session|
+|get [--key] \<key\>|Fetches and prints session parameter specified with name <<<key>>> from lens server|
*--+--+
|list resources [[--type] \<resource-type\>]|list all resources from session. If type is provided, lists resources of type <<<resource-type>>>. Valid values for type are jar and file.|
*--+--+
-|close/bye|Releases all resources of the server session and exits the shell|
+|remove file [--path] \<path-to-file-on-server-side\>|removes a file resource from session|
*--+--+
-|get [--key] \<key\>|Fetches and prints session parameter specified with name <<<key>>> from lens server|
+|remove jar [--path] \<path-to-jar-on-server-side\>|Removes a jar resource from session|
+*--+--+
+|set \<key\>=\<value\>|Assign <<<value>>> to session parameter specified with <<<key>>> on lens server|
*--+--+
|show params|Fetches and prints all session parameter from lens server|
*--+--+
@@ -115,14 +115,14 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|drop database [--db] \<database-name\>|drop a database with specified name|
-*--+--+
-|use [--db] \<database-name\>|change to new database|
-*--+--+
|create database [--db] \<database-name\> [--ignoreIfExists \<ignore-if-exists\>]|create a database with specified name. if <<<ignore-if-exists>>> is true, create will not be tried if already exists. Default is false|
*--+--+
+|drop database [--db] \<database-name\>|drop a database with specified name|
+*--+--+
|show databases|displays list of all databases|
*--+--+
+|use [--db] \<database-name\>|change to new database|
+*--+--+
<<Lens Database Commands>>
===
@@ -134,15 +134,15 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|show storages|list all storages|
-*--+--+
|create storage [--path] \<path-to-storage-spec\>|Create a new Storage from file <<<path-to-storage-spec>>>|
*--+--+
+|describe storage [--name] \<storage-name\>|describe storage <<<storage-name>>>|
+*--+--+
|drop storage [--name] \<storage-name\>|drop storage <<<storage-name>>>|
*--+--+
-|update storage [--name] \<storage-name\> [--path] \<path-to-storage-spec\>|update storage <<<storage-name>>> with storage spec from <<<path-to-storage-spec>>>|
+|show storages|list all storages|
*--+--+
-|describe storage [--name] \<storage-name\>|describe storage <<<storage-name>>>|
+|update storage [--name] \<storage-name\> [--path] \<path-to-storage-spec\>|update storage <<<storage-name>>> with storage spec from <<<path-to-storage-spec>>>|
*--+--+
<<Lens Storage Commands>>
@@ -155,17 +155,17 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|show cubes|show list of cubes in current database|
-*--+--+
|create cube [--path] \<path-to-cube-spec-file\>|Create a new Cube, taking spec from <<<path-to-cube-spec-file>>>|
*--+--+
-|describe cube [--name] \<cube_name\>|describe cube with name <<<cube_name>>>|
|cube latestdate [--cube] \<cube_name\> [--time_dimension] \<time_dimension\>|get latest date of data available in cube <<<cube_name>>> for time dimension <<<time_dimension>>>. Instead of time dimension, partition column can be directly passed as <<<time_dimension>>>|
*--+--+
-|update cube [--name] \<cube_name\> [--path] \<path-to-cube-spec-file\>|update cube <<<cube_name>>> with spec from <<<path-to-cube-spec-file>>>|
+|describe cube [--name] \<cube_name\>|describe cube with name <<<cube_name>>>|
*--+--+
|drop cube [--name] \<cube_name\>|drop cube <<<cube_name>>>|
*--+--+
-|cube latestdate [--cube] \<cube_name\> [--timeDimension] \<time_dimension_name\>|get latest date of data available in cube <<<cube_name>>> for time dimension <<<time_dimension_name>>>|
+|show cubes|show list of cubes in current database|
+*--+--+
+|update cube [--name] \<cube_name\> [--path] \<path-to-cube-spec-file\>|update cube <<<cube_name>>> with spec from <<<path-to-cube-spec-file>>>|
*--+--+
<<Lens Cube Commands>>
@@ -178,16 +178,16 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|show dimensions|show list of all dimensions in current database|
-*--+--+
|create dimension [--path] \<path-to-dimension-spec file\>|Create a new Dimension, taking spec from <<<path-to-dimension-spec file>>>|
*--+--+
|describe dimension [--name] \<dimension_name\>|describe dimension <<<dimension_name>>>|
*--+--+
-|update dimension [--name] \<dimension_name\> [--path] \<path-to-dimension-spec-file\>|update dimension <<<dimension_name>>>, taking spec from <<<path-to-dimension-spec file>>>|
-*--+--+
|drop dimension [--name] \<dimension_name\>|drop dimension <<<dimension_name>>>|
*--+--+
+|show dimensions|show list of all dimensions in current database|
+*--+--+
+|update dimension [--name] \<dimension_name\> [--path] \<path-to-dimension-spec-file\>|update dimension <<<dimension_name>>>, taking spec from <<<path-to-dimension-spec file>>>|
+*--+--+
<<Lens Dimension Commands>>
===
@@ -199,33 +199,35 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|show facts [[--cube_name] \<cube_name\>]|display list of fact tables in current database. If optional <<<cube_name>>> is supplied, only facts belonging to cube <<<cube_name>>> will be displayed|
-*--+--+
|create fact [--path] \<path-to-fact-spec-file\>|create a fact table with spec from <<<path-to-fact-spec-file>>>|
*--+--+
|describe fact [--fact_name] \<fact_name\>|describe fact <<<fact_name>>>|
*--+--+
-|update fact [--fact_name] \<fact_name\> [--path] \<path-to-fact-spec\>|update fact <<<fact_name>>> taking spec from <<<path-to-fact-spec>>>|
-*--+--+
|drop fact [--fact_name] \<fact_name\> [--cascade \<cascade\>]|drops fact <<<fact_name>>>. If <<<cascade>>> is true, all the storage tables associated with the fact <<<fact_name>>> are also dropped. By default <<<cascade>>> is false|
*--+--+
-|fact list storage [--fact_name] \<fact_name\>|display list of storages associated to fact <<<fact_name>>>|
+|fact add partitions [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [--path] \<partition-list-spec-path\>|add multiple partitions to fact <<<fact_name>>>'s storage <<<storage_name>>>, reading partition list spec from <<<partition-list-spec-path>>>|
+*--+--+
+|fact add single-partition [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [--path] \<partition-spec-path\>|add single partition to fact <<<fact_name>>>'s storage <<<storage_name>>>, reading spec from <<<partition-spec-path>>>|
*--+--+
|fact add storage [--fact_name] \<fact_name\> [--path] \<path-to-storage-spec\>|adds a new storage to fact <<<fact_name>>>, taking storage spec from <<<path-to-storage-spec>>>|
*--+--+
-|fact get storage [--fact_name] \<fact_name\> [--storage_name] \<path-to-storage-spec\>|describe storage <<<storage_name>>> of fact <<<fact_name>>>|
+|fact drop all storages [--fact_name] \<fact_name\>|drop all storages associated to fact <<<fact_name>>>|
+*--+--+
+|fact drop partitions [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|drop all partitions associated with fact <<<fact_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
*--+--+
|fact drop storage [--fact_name] \<fact_name\> [--storage_name] \<storage_name\>|drop storage <<<storage_name>>> from fact <<<fact_name>>>|
*--+--+
-|fact drop all storages [--fact_name] \<fact_name\>|drop all storages associated to fact <<<fact_name>>>|
+|fact get storage [--fact_name] \<fact_name\> [--storage_name] \<path-to-storage-spec\>|describe storage <<<storage_name>>> of fact <<<fact_name>>>|
*--+--+
|fact list partitions [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|get all partitions associated with fact <<<fact_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
*--+--+
-|fact drop partitions [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|drop all partitions associated with fact <<<fact_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
+|fact list storage [--fact_name] \<fact_name\>|display list of storages associated to fact <<<fact_name>>>|
*--+--+
-|fact add single-partition [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [--path] \<partition-spec-path\>|add single partition to fact <<<fact_name>>>'s storage <<<storage_name>>>, reading spec from <<<partition-spec-path>>>|
+|fact timelines [--fact_name] \<fact_name\> [--storage_name \<storage_name\>] [--update_period \<update_period\>] [--time_dimension \<time_dimension\>]|get timelines for fact. Can optionally specify storage, update period and time dimension to filter by. Instead of time dimension, partition column can be directly passed as <<<time_dimension>>>|
*--+--+
-|fact add partitions [--fact_name] \<fact_name\> [--storage_name] \<storage_name\> [--path] \<partition-list-spec-path\>|add multiple partition to fact <<<fact_name>>>'s storage <<<storage_name>>>, reading partition list spec from <<<partition-list-spec-path>>>|
+|show facts [[--cube_name] \<cube_name\>]|display list of fact tables in current database. If optional <<<cube_name>>> is supplied, only facts belonging to cube <<<cube_name>>> will be displayed|
+*--+--+
+|update fact [--fact_name] \<fact_name\> [--path] \<path-to-fact-spec\>|update fact <<<fact_name>>> taking spec from <<<path-to-fact-spec>>>|
*--+--+
<<Lens Fact Commands>>
@@ -238,33 +240,33 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|dimtable drop all storages [--dimtable_name] \<dimtable_name\>|drop all storages associated to dimension table|
+|create dimtable [--path] \<path-to-dimtable-spec-file\>|Create a new dimension table taking spec from <<<path-to-dimtable-spec-file>>>|
*--+--+
-|dimtable list partitions [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|get all partitions associated with dimtable <<<dimtable_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
+|describe dimtable [--dimtable_name] \<dimtable_name\>|describe dimtable <<<dimtable_name>>>|
*--+--+
-|dimtable drop partitions [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|drop all partitions associated with dimtable <<<dimtable_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
+|dimtable add partitions [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [--path] \<partition-list-spec-path\>|add multiple partitions to dimtable <<<dimtable_name>>>'s storage <<<storage_name>>>, reading partition list spec from <<<partition-list-spec-path>>>|
*--+--+
|dimtable add single-partition [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [--path] \<partition-spec-path\>|add single partition to dimtable <<<dimtable_name>>>'s storage <<<storage_name>>>, reading spec from <<<partition-spec-path>>>|
*--+--+
-|dimtable add partitions [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [--path] \<partition-list-spec-path\>|add multiple partition to dimtable <<<dimtable_name>>>'s storage <<<storage_name>>>, reading partition list spec from <<<partition-list-spec-path>>>|
+|dimtable add storage [--dimtable_name] \<dimtable_name\> [--path] \<path-to-storage-spec\>|adds a new storage to dimtable <<<dimtable_name>>>, taking storage spec from <<<path-to-storage-spec>>>|
*--+--+
-|show dimtables [[--dimension_name] \<dimension_name\>]|display list of dimtables in current database. If optional <<<dimension_name>>> is supplied, only facts belonging to dimension <<<dimension_name>>> will be displayed|
+|dimtable drop all storages [--dimtable_name] \<dimtable_name\>|drop all storages associated to dimension table|
*--+--+
-|create dimtable [--path] \<path-to-dimtable-spec-file\>|Create a new dimension table taking spec from <<<path-to-dimtable-spec-file>>>|
+|dimtable drop partitions [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|drop all partitions associated with dimtable <<<dimtable_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
*--+--+
-|describe dimtable [--dimtable_name] \<dimtable_name\>|describe dimtable <<<dimtable_name>>>|
+|dimtable drop storage [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\>|drop storage <<<storage_name>>> from dimtable <<<dimtable_name>>>|
*--+--+
-|update dimtable [--dimtable_name] \<dimtable_name\> [--path] \<path-to-dimtable-spec\>|update dimtable <<<dimtable_name>>> taking spec from <<<path-to-dimtable-spec>>>|
+|dimtable get storage [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\>|describe storage <<<storage_name>>> of dimtable <<<dimtable_name>>>|
*--+--+
-|drop dimtable [--dimtable_name] \<dimtable_name\> [--cascade \<cascade\>]|drop dimtable <<<dimtable_name>>>. If <<<cascade>>> is true, all the storage tables associated with the dimtable <<<dimtable_name>>> are also dropped. By default <<<cascade>>> is false|
+|dimtable list partitions [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\> [[--filter] \<partition-filter\>]|get all partitions associated with dimtable <<<dimtable_name>>>, storage <<<storage_name>>> filtered by <<<partition-filter>>>|
*--+--+
|dimtable list storages [--dimtable_name] \<dimtable_name\>|display list of storage associated to dimtable <<<dimtable_name>>>|
*--+--+
-|dimtable add storage [--dimtable_name] \<dimtable_name\> [--path] \<path-to-storage-spec\>|adds a new storage to dimtable <<<dimtable_name>>>, taking storage spec from <<<path-to-storage-spec>>>|
+|drop dimtable [--dimtable_name] \<dimtable_name\> [--cascade \<cascade\>]|drop dimtable <<<dimtable_name>>>. If <<<cascade>>> is true, all the storage tables associated with the dimtable <<<dimtable_name>>> are also dropped. By default <<<cascade>>> is false|
*--+--+
-|dimtable get storage [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\>|describe storage <<<storage_name>>> of dimtable <<<dimtable_name>>>|
+|show dimtables [[--dimension_name] \<dimension_name\>]|display list of dimtables in current database. If optional <<<dimension_name>>> is supplied, only dimtables belonging to dimension <<<dimension_name>>> will be displayed|
*--+--+
-|dimtable drop storage [--dimtable_name] \<dimtable_name\> [--storage_name] \<storage_name\>|drop storage <<<storage_name>>> from dimtable <<<dimtable_name>>>|
+|update dimtable [--dimtable_name] \<dimtable_name\> [--path] \<path-to-dimtable-spec\>|update dimtable <<<dimtable_name>>> taking spec from <<<path-to-dimtable-spec>>>|
*--+--+
<<Lens Dimension Table Commands>>
@@ -277,10 +279,10 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|show nativetables|show list of native tables belonging to current database|
-*--+--+
|describe nativetable [--name] \<native-table-name\>|describe nativetable named <<<native-table-name>>>|
*--+--+
+|show nativetables|show list of native tables belonging to current database|
+*--+--+
<<Lens Native Table Commands>>
===
@@ -300,31 +302,31 @@ User CLI Commands
*--+--+
|<<Command>>|<<Description>>|
*--+--+
-|query status [--query_handle] \<query_handle\>|Fetch status of executed query having query handle <<<query_handle>>>|
+|prepQuery destroy [--prepare_handle] \<prepare_handle\>|Destroy prepared query with handle <<<prepare_handle>>>|
*--+--+
-|query details [--query_handle] \<query_handle\>|Get query details of query with handle <<<query_handle>>>|
+|prepQuery details [--prepare_handle] \<prepare_handle\>|Get prepared query with handle <<<prepare_handle>>>|
*--+--+
-|query explain [--query] \<query-string\> [--save_location \<save_location\>]|Explain execution plan of query <<<query-string>>>. Can optionally save the plan to a file by providing <<<save_location>>>|
+|prepQuery execute [--prepare_handle] Prepare handle to execute [--async \<async\>] [--name \<query-name\>]|Execute prepared query with handle <<<prepare_handle>>>. If <<<async>>> is supplied and is true, query is run in async manner and query handle is returned immediately. Optionally, <<<query-name>>> can be provided, though not required.|
*--+--+
-|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally), as can be seen from the command syntax|
+|prepQuery explain [--query] \<query-string\> [--name \<query-name\>]|Explain and prepare query <<<query-string>>>. Can optionally provide <<<query-name>>>|
*--+--+
-|query execute [--query] \<query-string\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<query-string>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required|
+|prepQuery list [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all prepared queries. Various filters can be provided(optionally) as can be seen from command syntax|
*--+--+
-|query kill [--query_handle] \<query_handle\>|Kill query with handle <<<query_handle>>>|
+|prepQuery prepare [--query] \<query-string\> [--name \<query-name\>]|Prepare query <<<query-string>>> and return prepare handle. Can optionally provide <<<query-name>>>|
*--+--+
-|query results [--query_handle] \<query_handle\>|get results of async query with query handle <<<query_handle>>>|
+|query details [--query_handle] \<query_handle\>|Get query details of query with handle <<<query_handle>>>|
*--+--+
-|prepQuery list [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all prepared queries. Various filters can be provided(optionally) as can be seen from command syntax|
+|query execute [--query] \<query-string\> [--async \<async\>] [--name \<query-name\>]|Execute query <<<query-string>>>. If <<<async>>> is true, The query is launched in async manner and query handle is returned. It's by default false. <<<query name>>> can also be provided, though not required|
*--+--+
-|prepQuery details [--prepare_handle] \<prepare_handle\>|Get prepared query with handle <<<prepare_handle>>>|
+|query explain [--query] \<query-string\> [--save_location \<save_location\>]|Explain execution plan of query <<<query-string>>>. Can optionally save the plan to a file by providing <<<save_location>>>|
*--+--+
-|prepQuery destroy [--prepare_handle] \<prepare_handle\>|Destroy prepared query with handle <<<prepare_handle>>>|
+|query kill [--query_handle] \<query_handle\>|Kill query with handle <<<query_handle>>>|
*--+--+
-|prepQuery execute [--prepare_handle] Prepare handle to execute [--async \<async\>] [--name \<query-name\>]|Execute prepared query with handle <<<prepare_handle>>>. If <<<async>>> is supplied and is true, query is run in async manner and query handle is returned immediately. Optionally, <<<query-name>>> can be provided, though not required.|
+|query list [--state \<query-status\>] [--name \<query-name\>] [--user \<user-who-submitted-query\>] [--fromDate \<submission-time-is-after\>] [--toDate \<submission-time-is-before\>]|Get all queries. Various filter options can be provided(optionally), as can be seen from the command syntax|
*--+--+
-|prepQuery explain [--query] \<query-string\> [--name \<query-name\>]|Explain and prepare query <<<query-string>>>. Can optionally provide <<<query-name>>>|
+|query results [--query_handle] \<query_handle\>|get results of async query with query handle <<<query_handle>>>|
*--+--+
-|prepQuery prepare [--query] \<query-string\> [--name \<query-name\>]|Prepapre query <<<query-string>>> and return prepare handle. Can optionaly provide <<<query-name>>>|
+|query status [--query_handle] \<query_handle\>|Fetch status of executed query having query handle <<<query_handle>>>|
*--+--+
<<Lens Query Commands>>
[2/2] incubator-lens git commit: LENS-562 : Add cli command for
getting timeline of all facts or single fact or single fact+storage pair.
(Rajat Khandelwal via amareshwari)
Posted by am...@apache.org.
LENS-562 : Add cli command for getting timeline of all facts or single fact or single fact+storage pair. (Rajat Khandelwal via amareshwari)
Project: http://git-wip-us.apache.org/repos/asf/incubator-lens/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-lens/commit/e0b0c4c5
Tree: http://git-wip-us.apache.org/repos/asf/incubator-lens/tree/e0b0c4c5
Diff: http://git-wip-us.apache.org/repos/asf/incubator-lens/diff/e0b0c4c5
Branch: refs/heads/master
Commit: e0b0c4c55169b7a73b20d353290d41c083c27487
Parents: 1d82fd7
Author: Rajat Khandelwal <pr...@apache.org>
Authored: Thu May 21 16:18:50 2015 +0530
Committer: Amareshwari Sriramadasu <am...@apache.org>
Committed: Thu May 21 16:18:50 2015 +0530
----------------------------------------------------------------------
.../lens/cli/commands/LensCubeCommands.java | 5 +-
.../lens/cli/commands/LensFactCommands.java | 11 +
.../apache/lens/cli/TestLensFactCommands.java | 102 ++++--
.../lens/cli/doc/TestGenerateCLIUserDoc.java | 41 ++-
.../java/org/apache/lens/client/LensClient.java | 7 +
.../apache/lens/client/LensMetadataClient.java | 11 +
.../lens/cube/metadata/CubeMetastoreClient.java | 40 ++-
.../metadata/timeline/PartitionTimeline.java | 12 +-
.../apache/lens/cube/parse/CubeTestSetup.java | 346 ++++++++++---------
.../api/metastore/CubeMetastoreService.java | 2 +
.../metastore/CubeMetastoreServiceImpl.java | 13 +
.../server/metastore/MetastoreResource.java | 27 ++
.../org/apache/lens/server/LensTestUtil.java | 1 -
.../lens/server/query/TestQueryService.java | 11 +-
src/site/apt/user/cli.apt | 124 +++----
15 files changed, 468 insertions(+), 285 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
index 1c83b54..d05d7a5 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensCubeCommands.java
@@ -103,10 +103,11 @@ public class LensCubeCommands extends LensCRUDCommand<XCube> {
* @return the string
*/
@CliCommand(value = "cube latestdate",
- help = "get latest date of data available in cube <cube_name> for time dimension <time_dimension_name>")
+ help = "get latest date of data available in cube <cube_name> for time dimension <time_dimension_name>. "
+ + " Instead of time dimension, partition column can be directly passed as <time_dimension>")
public String getLatest(
@CliOption(key = {"", "cube"}, mandatory = true, help = "<cube_name>") String cube,
- @CliOption(key = {"", "timeDimension"}, mandatory = true, help = "<time_dimension_name>") String timeDim) {
+ @CliOption(key = {"", "time_dimension"}, mandatory = true, help = "<time_dimension>") String timeDim) {
Date dt = getClient().getLatestDateOfCube(cube, timeDim);
return dt == null ? "No Data Available" : formatDate(dt);
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
index 7ef7c23..24992b9 100644
--- a/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
+++ b/lens-cli/src/main/java/org/apache/lens/cli/commands/LensFactCommands.java
@@ -237,6 +237,17 @@ public class LensFactCommands extends LensCRUDStoragePartitionCommand<XFactTable
return addPartitions(tableName, storageName, path);
}
+ @CliCommand(value = "fact timelines",
+ help = "get timelines for fact. Can optionally specify storage, update period and time dimension to filter by."
+ + " Instead of time dimension, partition column can be directly passed as <time_dimension>")
+ public List<String> getTimelines(
+ @CliOption(key = {"", "fact_name"}, mandatory = true, help = "<fact_name>") String factName,
+ @CliOption(key = {"storage_name"}, mandatory = false, help = "<storage_name>") String storageName,
+ @CliOption(key = {"update_period"}, mandatory = false, help = "<update_period>") String updatePeriod,
+ @CliOption(key = {"time_dimension"}, mandatory = false, help = "<time_dimension>") String timeDimension) {
+ return getClient().getPartitionTimelines(factName, storageName, updatePeriod, timeDimension);
+ }
+
@Override
protected XStorageTableElement readStorage(String tableName, String storage) {
return getClient().getStorageFromFact(tableName, storage);
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
index 195bd43..19979cc 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/TestLensFactCommands.java
@@ -18,9 +18,12 @@
*/
package org.apache.lens.cli;
+import static org.testng.Assert.*;
+
import java.io.*;
import java.net.URISyntaxException;
import java.net.URL;
+import java.util.List;
import javax.ws.rs.NotFoundException;
@@ -30,7 +33,6 @@ import org.apache.lens.client.LensClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.testng.Assert;
import org.testng.annotations.Test;
/**
@@ -67,7 +69,7 @@ public class TestLensFactCommands extends LensCliApplicationTest {
private void createSampleCube() throws URISyntaxException {
URL cubeSpec = TestLensCubeCommands.class.getClassLoader().getResource("sample-cube.xml");
String cubeList = getCubeCommand().showCubes();
- Assert.assertFalse(cubeList.contains("sample_cube"), cubeList);
+ assertFalse(cubeList.contains("sample_cube"), cubeList);
getCubeCommand().createCube(new File(cubeSpec.toURI()).getAbsolutePath());
}
@@ -101,31 +103,31 @@ public class TestLensFactCommands extends LensCliApplicationTest {
public static void addFact1Table() throws IOException {
LensFactCommands command = getCommand();
String factList = command.showFacts(null);
- Assert.assertEquals(command.showFacts("sample_cube"), "No fact found for sample_cube");
- Assert.assertEquals(factList, "No fact found", "Fact tables should not be found");
+ assertEquals(command.showFacts("sample_cube"), "No fact found for sample_cube");
+ assertEquals(factList, "No fact found", "Fact tables should not be found");
// add local storage before adding fact table
TestLensStorageCommands.addLocalStorage(FACT_LOCAL);
URL factSpec = TestLensFactCommands.class.getClassLoader().getResource("fact1.xml");
try {
command.createFact(new File(factSpec.toURI()).getAbsolutePath());
} catch (Exception e) {
- Assert.fail("Unable to create fact table" + e.getMessage());
+ fail("Unable to create fact table" + e.getMessage());
}
factList = command.showFacts(null);
- Assert.assertEquals(command.showFacts("sample_cube"), factList);
+ assertEquals(command.showFacts("sample_cube"), factList);
try {
- Assert.assertEquals(command.showFacts("blah"), factList);
- Assert.fail();
+ assertEquals(command.showFacts("blah"), factList);
+ fail();
} catch (NotFoundException e) {
LOG.info("blah is not a table", e);
}
try {
- Assert.assertEquals(command.showFacts("fact1"), factList);
- Assert.fail();
+ assertEquals(command.showFacts("fact1"), factList);
+ fail();
} catch (NotFoundException e) {
LOG.info("fact1 is a table, but not a cube table", e);
}
- Assert.assertEquals("fact1", factList, "Fact1 table should be found");
+ assertEquals("fact1", factList, "Fact1 table should be found");
}
/**
@@ -159,20 +161,20 @@ public class TestLensFactCommands extends LensCliApplicationTest {
String propString = "name : fact1.prop value : f1";
String propString1 = "name : fact1.prop1 value : f2";
- Assert.assertTrue(desc.contains(propString));
+ assertTrue(desc.contains(propString));
command.updateFactTable("fact1", "/tmp/local-fact1.xml");
desc = command.describeFactTable("fact1");
LOG.debug(desc);
- Assert.assertTrue(desc.contains(propString), "The sample property value is not set");
+ assertTrue(desc.contains(propString), "The sample property value is not set");
- Assert.assertTrue(desc.contains(propString1), "The sample property value is not set");
+ assertTrue(desc.contains(propString1), "The sample property value is not set");
newFile.delete();
} catch (Throwable t) {
t.printStackTrace();
- Assert.fail("Updating of the fact1 table failed with " + t.getMessage());
+ fail("Updating of the fact1 table failed with " + t.getMessage());
}
}
@@ -183,16 +185,16 @@ public class TestLensFactCommands extends LensCliApplicationTest {
private static void testFactStorageActions() {
LensFactCommands command = getCommand();
String result = command.getFactStorages("fact1");
- Assert.assertEquals(FACT_LOCAL, result);
+ assertEquals(FACT_LOCAL, result);
command.dropAllFactStorages("fact1");
result = command.getFactStorages("fact1");
- Assert.assertEquals(result, "No storage found for fact1");
+ assertEquals(result, "No storage found for fact1");
addLocalStorageToFact1();
result = command.getFactStorages("fact1");
- Assert.assertNotEquals(result, "No storage found for fact1");
+ assertNotEquals(result, "No storage found for fact1");
command.dropStorageFromFact("fact1", FACT_LOCAL);
result = command.getFactStorages("fact1");
- Assert.assertEquals(result, "No storage found for fact1");
+ assertEquals(result, "No storage found for fact1");
addLocalStorageToFact1();
}
@@ -207,14 +209,14 @@ public class TestLensFactCommands extends LensCliApplicationTest {
command.addNewFactStorage("fact1", new File(resource.toURI()).getAbsolutePath());
} catch (Throwable t) {
t.printStackTrace();
- Assert.fail("Unable to locate the storage part file for adding new storage to fact table fact1");
+ fail("Unable to locate the storage part file for adding new storage to fact table fact1");
}
result = command.getFactStorages("fact1");
- Assert.assertEquals(FACT_LOCAL, result);
+ assertEquals(FACT_LOCAL, result);
result = command.getStorageFromFact("fact1", FACT_LOCAL);
- Assert.assertTrue(result.contains("HOURLY"));
- Assert.assertTrue(result.contains("DAILY"));
+ assertTrue(result.contains("HOURLY"));
+ assertTrue(result.contains("DAILY"));
}
@@ -224,14 +226,15 @@ public class TestLensFactCommands extends LensCliApplicationTest {
private void testFactPartitionActions() {
LensFactCommands command = getCommand();
String result;
+ verifyEmptyTimelines();
result = command.getAllPartitionsOfFact("fact1", FACT_LOCAL, null);
- Assert.assertTrue(result.trim().isEmpty());
+ assertTrue(result.trim().isEmpty());
try {
command.addPartitionToFact("fact1", FACT_LOCAL, new File(
TestLensFactCommands.class.getClassLoader().getResource("fact1-local-part.xml").toURI()).getAbsolutePath());
} catch (Throwable t) {
t.printStackTrace();
- Assert.fail("Unable to locate the storage part file for adding new storage to fact table fact1");
+ fail("Unable to locate the storage part file for adding new storage to fact table fact1");
}
verifyAndDeletePartitions();
try {
@@ -239,7 +242,7 @@ public class TestLensFactCommands extends LensCliApplicationTest {
TestLensFactCommands.class.getClassLoader().getResource("fact1-local-parts.xml").toURI()).getAbsolutePath());
} catch (Throwable t) {
t.printStackTrace();
- Assert.fail("Unable to locate the storage part file for adding new storage to fact table fact1");
+ fail("Unable to locate the storage part file for adding new storage to fact table fact1");
}
verifyAndDeletePartitions();
@@ -247,27 +250,58 @@ public class TestLensFactCommands extends LensCliApplicationTest {
try {
command.addPartitionToFact("fact1", FACT_LOCAL, new File(
TestLensFactCommands.class.getClassLoader().getResource("fact1-local-parts.xml").toURI()).getAbsolutePath());
- Assert.fail("Should fail");
+ fail("Should fail");
} catch (Throwable t) {
// pass
}
try {
command.addPartitionsToFact("fact1", FACT_LOCAL, new File(
TestLensFactCommands.class.getClassLoader().getResource("fact1-local-part.xml").toURI()).getAbsolutePath());
- Assert.fail("Should fail");
+ fail("Should fail");
} catch (Throwable t) {
// pass
}
}
+ private void verifyEmptyTimelines() {
+ List<String> timelines = command.getTimelines("fact1", null, null, null);
+ assertEquals(timelines.size(), 2);
+ for (String timeline : timelines) {
+ assertTrue(timeline.contains("EndsAndHolesPartitionTimeline"));
+ assertTrue(timeline.contains("first=null"));
+ assertTrue(timeline.contains("latest=null"));
+ assertTrue(timeline.contains("holes=[]"));
+ }
+ }
+
private void verifyAndDeletePartitions() {
- Assert.assertEquals(getCubeCommand().getLatest("sample_cube", "dt"), "2014-03-27T12:00:00:000");
+ List<String> timelines;
+ assertEquals(getCubeCommand().getLatest("sample_cube", "dt"), "2014-03-27T12:00:00:000");
String result = command.getAllPartitionsOfFact("fact1", FACT_LOCAL, null);
- Assert.assertTrue(result.contains("HOURLY"));
+ assertTrue(result.contains("HOURLY"));
+ timelines = command.getTimelines("fact1", null, null, null);
+ assertEquals(timelines.size(), 2);
+ for (String timeline : timelines) {
+ assertTrue(timeline.contains("EndsAndHolesPartitionTimeline"));
+ if (timeline.contains("DAILY")) {
+ assertTrue(timeline.contains("first=null"));
+ assertTrue(timeline.contains("latest=null"));
+ assertTrue(timeline.contains("holes=[]"));
+ } else {
+ assertTrue(timeline.contains("first=2014-03-27-12"));
+ assertTrue(timeline.contains("latest=2014-03-27-12"));
+ assertTrue(timeline.contains("holes=[]"));
+ }
+ }
+ assertEquals(command.getTimelines("fact1", FACT_LOCAL, null, null), timelines);
+ assertTrue(timelines.containsAll(command.getTimelines("fact1", FACT_LOCAL, "hourly", null)));
+ assertTrue(timelines.containsAll(command.getTimelines("fact1", FACT_LOCAL, "hourly", "dt")));
+ assertEquals(command.getTimelines("fact1", null, null, "dt"), timelines);
+ assertEquals(command.getTimelines("fact1", FACT_LOCAL, null, "dt"), timelines);
String dropPartitionsStatus = command.dropAllPartitionsOfFact("fact1", FACT_LOCAL, null);
- Assert.assertFalse(dropPartitionsStatus.contains("Syntax error, please try in following"));
+ assertFalse(dropPartitionsStatus.contains("Syntax error, please try in following"));
result = command.getAllPartitionsOfFact("fact1", FACT_LOCAL, null);
- Assert.assertTrue(result.trim().isEmpty());
+ assertTrue(result.trim().isEmpty());
}
/**
@@ -276,10 +310,10 @@ public class TestLensFactCommands extends LensCliApplicationTest {
public static void dropFact1Table() {
LensFactCommands command = getCommand();
String factList = command.showFacts(null);
- Assert.assertEquals("fact1", factList, "Fact1 table should be found");
+ assertEquals("fact1", factList, "Fact1 table should be found");
command.dropFact("fact1", false);
factList = command.showFacts(null);
- Assert.assertEquals(factList, "No fact found", "Fact tables should not be found");
+ assertEquals(factList, "No fact found", "Fact tables should not be found");
TestLensStorageCommands.dropStorage(FACT_LOCAL);
}
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cli/src/test/java/org/apache/lens/cli/doc/TestGenerateCLIUserDoc.java
----------------------------------------------------------------------
diff --git a/lens-cli/src/test/java/org/apache/lens/cli/doc/TestGenerateCLIUserDoc.java b/lens-cli/src/test/java/org/apache/lens/cli/doc/TestGenerateCLIUserDoc.java
index 5086907..e5fe067 100644
--- a/lens-cli/src/test/java/org/apache/lens/cli/doc/TestGenerateCLIUserDoc.java
+++ b/lens-cli/src/test/java/org/apache/lens/cli/doc/TestGenerateCLIUserDoc.java
@@ -21,7 +21,10 @@ package org.apache.lens.cli.doc;
import java.io.*;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
+import java.util.ArrayList;
+import java.util.Comparator;
import java.util.HashSet;
+import java.util.TreeSet;
import org.apache.lens.cli.commands.*;
import org.apache.lens.cli.commands.annotations.UserDocumentation;
@@ -32,8 +35,11 @@ import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.testng.annotations.Test;
+import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
+import lombok.extern.slf4j.Slf4j;
+@Slf4j
public class TestGenerateCLIUserDoc {
public static final String APT_FILE = "../src/site/apt/user/cli.apt";
@@ -42,10 +48,17 @@ public class TestGenerateCLIUserDoc {
BufferedWriter bw = new BufferedWriter(new FileWriter(new File(APT_FILE)));
StringBuilder sb = new StringBuilder();
sb.append(getCLIIntroduction()).append("\n\n\n");
- Class[] classes =
- new Class[]{LensConnectionCommands.class, LensDatabaseCommands.class, LensStorageCommands.class,
- LensCubeCommands.class, LensDimensionCommands.class, LensFactCommands.class, LensDimensionTableCommands.class,
- LensNativeTableCommands.class, LensQueryCommands.class, };
+ ArrayList<Class<? extends BaseLensCommand>> classes = Lists.newArrayList(
+ LensConnectionCommands.class,
+ LensDatabaseCommands.class,
+ LensStorageCommands.class,
+ LensCubeCommands.class,
+ LensDimensionCommands.class,
+ LensFactCommands.class,
+ LensDimensionTableCommands.class,
+ LensNativeTableCommands.class,
+ LensQueryCommands.class
+ );
for (Class claz : classes) {
UserDocumentation doc = (UserDocumentation) claz.getAnnotation(UserDocumentation.class);
if (doc != null && StringUtils.isNotBlank(doc.title())) {
@@ -54,11 +67,23 @@ public class TestGenerateCLIUserDoc {
sb.append("*--+--+\n"
+ "|<<Command>>|<<Description>>|\n"
+ "*--+--+\n");
+ TreeSet<Method> methods = Sets.newTreeSet(new Comparator<Method>() {
+ @Override
+ public int compare(Method o1, Method o2) {
+ return o1.getAnnotation(CliCommand.class).value()[0].compareTo(o2.getAnnotation(CliCommand.class).value()[0]);
+ }
+ });
+
for (Method method : claz.getMethods()) {
- CliCommand annot = method.getAnnotation(CliCommand.class);
- if (annot == null) {
- continue;
+ if (method.getAnnotation(CliCommand.class) != null) {
+ methods.add(method);
+ } else {
+ log.info("Not adding " + method.getDeclaringClass().getSimpleName() + "#" + method.getName());
}
+ }
+
+ for (Method method : methods) {
+ CliCommand annot = method.getAnnotation(CliCommand.class);
sb.append("|");
String sep = "";
for (String value : annot.value()) {
@@ -114,7 +139,7 @@ public class TestGenerateCLIUserDoc {
new BufferedReader(new InputStreamReader(TestGenerateCLIUserDoc.class.getResourceAsStream("/cli-intro.apt")));
StringBuilder sb = new StringBuilder();
String line;
- while((line = br.readLine()) != null) {
+ while ((line = br.readLine()) != null) {
sb.append(line).append("\n");
}
return sb;
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-client/src/main/java/org/apache/lens/client/LensClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensClient.java b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
index 016e4ab..9b0c935 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensClient.java
@@ -88,6 +88,11 @@ public class LensClient {
return mc.getLatestDateOfCube(cubeName, timePartition);
}
+ public List<String> getPartitionTimelines(String factName, String storageName, String updatePeriod,
+ String timeDimension) {
+ return mc.getPartitionTimelines(factName, storageName, updatePeriod, timeDimension);
+ }
+
public static class LensClientResultSetWithStats {
private final LensClientResultSet resultSet;
private final LensQuery query;
@@ -216,10 +221,12 @@ public class LensClient {
LOG.debug("Getting all fact table");
return mc.getAllFactTables(cubeName);
}
+
public List<String> getAllDimensionTables() {
LOG.debug("Getting all dimension table");
return mc.getAllDimensionTables();
}
+
public List<String> getAllDimensionTables(String dimensionName) {
LOG.debug("Getting all dimension table");
return mc.getAllDimensionTables(dimensionName);
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
----------------------------------------------------------------------
diff --git a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
index 5e406b5..a6651f6 100644
--- a/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
+++ b/lens-client/src/main/java/org/apache/lens/client/LensMetadataClient.java
@@ -954,4 +954,15 @@ public class LensMetadataClient {
.request(MediaType.APPLICATION_XML)
.get(DateTime.class).getDate();
}
+
+ public List<String> getPartitionTimelines(String factName, String storageName, String updatePeriod,
+ String timeDimension) {
+ return getMetastoreWebTarget().path("facts").path(factName).path("timelines")
+ .queryParam("storage", storageName)
+ .queryParam("updatePeriod", updatePeriod)
+ .queryParam("timeDimension", timeDimension)
+ .queryParam("sessionid", this.connection.getSessionHandle())
+ .request(MediaType.APPLICATION_XML)
+ .get(StringList.class).getElements();
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
index 08a63b8..326a49b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/CubeMetastoreClient.java
@@ -134,8 +134,46 @@ public class CubeMetastoreClient {
allHiveTables.clear();
}
+ public List<PartitionTimeline> getTimelines(String factName, String storage, String updatePeriodStr,
+ String timeDimension)
+ throws LensException, HiveException {
+ UpdatePeriod updatePeriod = updatePeriodStr == null ? null : UpdatePeriod.valueOf(updatePeriodStr.toUpperCase());
+ List<PartitionTimeline> ret = Lists.newArrayList();
+ CubeFactTable fact = getCubeFact(factName);
+ List<String> keys = Lists.newArrayList();
+ if (storage != null) {
+ keys.add(storage);
+ } else {
+ keys.addAll(fact.getStorages());
+ }
+ String partCol = null;
+ if (timeDimension != null) {
+ Cube baseCube;
+ CubeInterface cube = getCube(fact.getCubeName());
+ if (cube instanceof Cube) {
+ baseCube = (Cube) cube;
+ } else {
+ baseCube = ((DerivedCube) cube).getParent();
+ }
+ partCol = baseCube.getPartitionColumnOfTimeDim(timeDimension);
+ }
+ for (String key : keys) {
+ for (Map.Entry<UpdatePeriod, CaseInsensitiveStringHashMap<PartitionTimeline>> entry : partitionTimelineCache
+ .get(factName, key).entrySet()) {
+ if (updatePeriod == null || entry.getKey().equals(updatePeriod)) {
+ for (Map.Entry<String, PartitionTimeline> entry1 : entry.getValue().entrySet()) {
+ if (partCol == null || partCol.equals(entry1.getKey())) {
+ ret.add(entry1.getValue());
+ }
+ }
+ }
+ }
+ }
+ return ret;
+ }
+
/**
- * In-memory storage of {@link org.apache.lens.cube.metadata.timeline.PartitionTimeline} objects for each valid
+ * In-memory storage of {@link PartitionTimeline} objects for each valid
* storagetable-updateperiod-partitioncolumn tuple. also simultaneously stored in metastore table of the
* storagetable.
*/
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
index d27e43e..4ede3db 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/metadata/timeline/PartitionTimeline.java
@@ -33,7 +33,6 @@ import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import lombok.Data;
import lombok.NonNull;
-import lombok.ToString;
import lombok.extern.apachecommons.CommonsLog;
/**
@@ -44,7 +43,6 @@ import lombok.extern.apachecommons.CommonsLog;
* @see org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline
*/
@Data
-@ToString(exclude = {"client"})
@CommonsLog
public abstract class PartitionTimeline implements Iterable<TimePartition> {
private final String storageTableName;
@@ -89,9 +87,10 @@ public abstract class PartitionTimeline implements Iterable<TimePartition> {
props.put(entry.getKey().substring(prefix.length()), entry.getValue());
}
}
- log.info("initializing timeline: " + getStorageTableName() + ", " + getUpdatePeriod() + ", " + getPartCol());
+ log.info("initializing timeline from table properties: "
+ + getStorageTableName() + ", " + getUpdatePeriod() + ", " + getPartCol());
initFromProperties(props);
- log.info("initialized to " + toProperties());
+ log.info("initialized to: " + this);
}
/**
@@ -116,9 +115,11 @@ public abstract class PartitionTimeline implements Iterable<TimePartition> {
if (getAll() == null) {
return true;
}
+ log.info("initializing timeline from batch addition: "
+ + getStorageTableName() + ", " + getUpdatePeriod() + ", " + getPartCol());
boolean result = add(getAll());
all = null;
- log.info("after commit batch additions, timeline is: " + this);
+ log.info("initialized to: " + this);
return result;
}
@@ -147,6 +148,7 @@ public abstract class PartitionTimeline implements Iterable<TimePartition> {
// Can also return the failed to add items.
return result;
}
+
/**
* Add partition range to the timeline. Default implementation is to iterate over the range and add
* each time partition belonging to the given range. Implementing classes can override.
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
index 69bd57e..5c776cc 100644
--- a/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
+++ b/lens-cube/src/test/java/org/apache/lens/cube/parse/CubeTestSetup.java
@@ -19,12 +19,18 @@
package org.apache.lens.cube.parse;
+import static org.apache.lens.cube.metadata.UpdatePeriod.*;
+
+import static org.testng.Assert.assertEquals;
+import static org.testng.Assert.assertNotNull;
+
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import org.apache.lens.cube.metadata.*;
import org.apache.lens.cube.metadata.timeline.EndsAndHolesPartitionTimeline;
+import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
import org.apache.lens.cube.metadata.timeline.StoreAllPartitionTimeline;
import org.apache.lens.server.api.LensConfConstants;
import org.apache.lens.server.api.error.LensException;
@@ -43,11 +49,10 @@ import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.mapred.TextInputFormat;
-import org.testng.Assert;
-
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
@@ -162,7 +167,7 @@ public class CubeTestSetup {
BEFORE_4_DAYS_START = cal.getTime();
- THIS_YEAR_START = DateUtils.truncate(NOW, UpdatePeriod.YEARLY.calendarField());
+ THIS_YEAR_START = DateUtils.truncate(NOW, YEARLY.calendarField());
THIS_YEAR_END = DateUtils.addYears(THIS_YEAR_START, 1);
LAST_YEAR_START = DateUtils.addYears(THIS_YEAR_START, -1);
LAST_YEAR_END = THIS_YEAR_START;
@@ -276,7 +281,7 @@ public class CubeTestSetup {
List<String> notLatestConditions) {
StringBuilder expected = new StringBuilder();
int numTabs = storageTableToWhereClause.size();
- Assert.assertEquals(1, numTabs);
+ assertEquals(1, numTabs);
for (Map.Entry<String, String> entry : storageTableToWhereClause.entrySet()) {
String storageTable = entry.getKey();
expected.append(selExpr);
@@ -363,14 +368,14 @@ public class CubeTestSetup {
List<String> dailyparts = new ArrayList<String>();
Date dayStart;
if (!CubeTestSetup.isZerothHour()) {
- addParts(hourlyparts, UpdatePeriod.HOURLY, from, DateUtil.getCeilDate(from, UpdatePeriod.DAILY));
- addParts(hourlyparts, UpdatePeriod.HOURLY, DateUtil.getFloorDate(to, UpdatePeriod.DAILY),
- DateUtil.getFloorDate(to, UpdatePeriod.HOURLY));
- dayStart = DateUtil.getCeilDate(from, UpdatePeriod.DAILY);
+ addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
+ addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
+ DateUtil.getFloorDate(to, HOURLY));
+ dayStart = DateUtil.getCeilDate(from, DAILY);
} else {
dayStart = from;
}
- addParts(dailyparts, UpdatePeriod.DAILY, dayStart, DateUtil.getFloorDate(to, UpdatePeriod.DAILY));
+ addParts(dailyparts, DAILY, dayStart, DateUtil.getFloorDate(to, DAILY));
List<String> parts = new ArrayList<String>();
parts.addAll(hourlyparts);
parts.addAll(dailyparts);
@@ -385,14 +390,14 @@ public class CubeTestSetup {
List<String> dailyparts = new ArrayList<String>();
Date dayStart;
if (!CubeTestSetup.isZerothHour()) {
- addParts(hourlyparts, UpdatePeriod.HOURLY, from, DateUtil.getCeilDate(from, UpdatePeriod.DAILY));
- addParts(hourlyparts, UpdatePeriod.HOURLY, DateUtil.getFloorDate(to, UpdatePeriod.DAILY),
- DateUtil.getFloorDate(to, UpdatePeriod.HOURLY));
- dayStart = DateUtil.getCeilDate(from, UpdatePeriod.DAILY);
+ addParts(hourlyparts, HOURLY, from, DateUtil.getCeilDate(from, DAILY));
+ addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(to, DAILY),
+ DateUtil.getFloorDate(to, HOURLY));
+ dayStart = DateUtil.getCeilDate(from, DAILY);
} else {
dayStart = from;
}
- addParts(dailyparts, UpdatePeriod.DAILY, dayStart, DateUtil.getFloorDate(to, UpdatePeriod.DAILY));
+ addParts(dailyparts, DAILY, dayStart, DateUtil.getFloorDate(to, DAILY));
updatePeriodToWhereMap.put("DAILY", StorageUtil.getWherePartClause(timedDimension, cubeName, dailyparts));
updatePeriodToWhereMap.put("HOURLY", StorageUtil.getWherePartClause(timedDimension, cubeName, hourlyparts));
return updatePeriodToWhereMap;
@@ -409,22 +414,22 @@ public class CubeTestSetup {
Date dayStart = TWO_MONTHS_BACK;
Date monthStart = TWO_MONTHS_BACK;
if (!CubeTestSetup.isZerothHour()) {
- addParts(hourlyparts, UpdatePeriod.HOURLY, TWO_MONTHS_BACK,
- DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.DAILY));
- addParts(hourlyparts, UpdatePeriod.HOURLY, DateUtil.getFloorDate(NOW, UpdatePeriod.DAILY),
- DateUtil.getFloorDate(NOW, UpdatePeriod.HOURLY));
- dayStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.DAILY);
- monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY);
+ addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
+ DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
+ addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
+ DateUtil.getFloorDate(NOW, HOURLY));
+ dayStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY);
+ monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY);
}
Calendar cal = new GregorianCalendar();
cal.setTime(dayStart);
if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
- addParts(dailyparts, UpdatePeriod.DAILY, dayStart, DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY));
- monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY);
+ addParts(dailyparts, DAILY, dayStart, DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY));
+ monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY);
}
- addParts(dailyparts, UpdatePeriod.DAILY, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY),
- DateUtil.getFloorDate(NOW, UpdatePeriod.DAILY));
- addParts(monthlyparts, UpdatePeriod.MONTHLY, monthStart, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY));
+ addParts(dailyparts, DAILY, DateUtil.getFloorDate(NOW, MONTHLY),
+ DateUtil.getFloorDate(NOW, DAILY));
+ addParts(monthlyparts, MONTHLY, monthStart, DateUtil.getFloorDate(NOW, MONTHLY));
List<String> parts = new ArrayList<String>();
parts.addAll(dailyparts);
parts.addAll(hourlyparts);
@@ -461,22 +466,22 @@ public class CubeTestSetup {
Date dayStart = TWO_MONTHS_BACK;
Date monthStart = TWO_MONTHS_BACK;
if (!CubeTestSetup.isZerothHour()) {
- addParts(hourlyparts, UpdatePeriod.HOURLY, TWO_MONTHS_BACK,
- DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.DAILY));
- addParts(hourlyparts, UpdatePeriod.HOURLY, DateUtil.getFloorDate(NOW, UpdatePeriod.DAILY),
- DateUtil.getFloorDate(NOW, UpdatePeriod.HOURLY));
- dayStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.DAILY);
- monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY);
+ addParts(hourlyparts, HOURLY, TWO_MONTHS_BACK,
+ DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY));
+ addParts(hourlyparts, HOURLY, DateUtil.getFloorDate(NOW, DAILY),
+ DateUtil.getFloorDate(NOW, HOURLY));
+ dayStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, DAILY);
+ monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY);
}
Calendar cal = new GregorianCalendar();
cal.setTime(dayStart);
if (cal.get(Calendar.DAY_OF_MONTH) != 1) {
- addParts(dailyparts, UpdatePeriod.DAILY, dayStart, DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY));
- monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, UpdatePeriod.MONTHLY);
+ addParts(dailyparts, DAILY, dayStart, DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY));
+ monthStart = DateUtil.getCeilDate(TWO_MONTHS_BACK, MONTHLY);
}
- addParts(dailyparts, UpdatePeriod.DAILY, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY),
- DateUtil.getFloorDate(NOW, UpdatePeriod.DAILY));
- addParts(monthlyparts, UpdatePeriod.MONTHLY, monthStart, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY));
+ addParts(dailyparts, DAILY, DateUtil.getFloorDate(NOW, MONTHLY),
+ DateUtil.getFloorDate(NOW, DAILY));
+ addParts(monthlyparts, MONTHLY, monthStart, DateUtil.getFloorDate(NOW, MONTHLY));
updatePeriodToPart.put("HOURLY", hourlyparts);
updatePeriodToPart.put("DAILY", dailyparts);
@@ -501,7 +506,7 @@ public class CubeTestSetup {
public static Map<String, String> getWhereForMonthly2months(String monthlyTable) {
Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, String>();
List<String> parts = new ArrayList<String>();
- addParts(parts, UpdatePeriod.MONTHLY, TWO_MONTHS_BACK, DateUtil.getFloorDate(NOW, UpdatePeriod.MONTHLY));
+ addParts(parts, MONTHLY, TWO_MONTHS_BACK, DateUtil.getFloorDate(NOW, MONTHLY));
storageTableToWhereClause.put(getDbName() + monthlyTable,
StorageUtil.getWherePartClause("dt", TEST_CUBE_NAME, parts));
return storageTableToWhereClause;
@@ -514,7 +519,7 @@ public class CubeTestSetup {
public static Map<String, String> getWhereForHourly2days(String alias, String hourlyTable) {
Map<String, String> storageTableToWhereClause = new LinkedHashMap<String, String>();
List<String> parts = new ArrayList<String>();
- addParts(parts, UpdatePeriod.HOURLY, TWODAYS_BACK, DateUtil.getFloorDate(NOW, UpdatePeriod.HOURLY));
+ addParts(parts, HOURLY, TWODAYS_BACK, DateUtil.getFloorDate(NOW, HOURLY));
storageTableToWhereClause.put(getDbName() + hourlyTable, StorageUtil.getWherePartClause("dt", alias, parts));
return storageTableToWhereClause;
}
@@ -835,12 +840,12 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.MINUTELY);
- updates.add(UpdatePeriod.HOURLY);
- updates.add(UpdatePeriod.DAILY);
- updates.add(UpdatePeriod.MONTHLY);
- updates.add(UpdatePeriod.QUARTERLY);
- updates.add(UpdatePeriod.YEARLY);
+ updates.add(MINUTELY);
+ updates.add(HOURLY);
+ updates.add(DAILY);
+ updates.add(MONTHLY);
+ updates.add(QUARTERLY);
+ updates.add(YEARLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
@@ -945,7 +950,7 @@ public class CubeTestSetup {
storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.HOURLY);
+ updates.add(HOURLY);
storageAggregatePeriods.put(c1, updates);
storageTables = new HashMap<String, StorageTableDesc>();
@@ -974,7 +979,7 @@ public class CubeTestSetup {
storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.HOURLY);
+ updates.add(HOURLY);
storageAggregatePeriods.put(c1, updates);
storageTables = new HashMap<String, StorageTableDesc>();
@@ -985,7 +990,7 @@ public class CubeTestSetup {
}
- private void createCubeFact(CubeMetastoreClient client) throws HiveException, LensException {
+ private void createCubeFact(CubeMetastoreClient client) throws Exception {
String factName = "testFact";
List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
for (CubeMeasure measure : cubeMeasures) {
@@ -1002,12 +1007,12 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.MINUTELY);
- updates.add(UpdatePeriod.HOURLY);
- updates.add(UpdatePeriod.DAILY);
- updates.add(UpdatePeriod.MONTHLY);
- updates.add(UpdatePeriod.QUARTERLY);
- updates.add(UpdatePeriod.YEARLY);
+ updates.add(MINUTELY);
+ updates.add(HOURLY);
+ updates.add(DAILY);
+ updates.add(MONTHLY);
+ updates.add(QUARTERLY);
+ updates.add(YEARLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
@@ -1046,9 +1051,9 @@ public class CubeTestSetup {
Table table = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(),
Storage.getPrefix(c4)));
- table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY, "ttd"),
+ table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY, "ttd"),
StoreAllPartitionTimeline.class.getCanonicalName());
- table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY, "ttd2"),
+ table.getParameters().put(MetastoreUtil.getPartitionTimelineStorageClassKey(HOURLY, "ttd2"),
StoreAllPartitionTimeline.class.getCanonicalName());
client.pushHiveTable(table);
// Add all hourly partitions for two days
@@ -1057,12 +1062,21 @@ public class CubeTestSetup {
Date temp = cal.getTime();
List<StoragePartitionDesc> storagePartitionDescs = Lists.newArrayList();
List<String> partitions = Lists.newArrayList();
+ StoreAllPartitionTimeline ttdStoreAll =
+ new StoreAllPartitionTimeline(MetastoreUtil.getFactStorageTableName(fact.getName(), c4), HOURLY,
+ "ttd");
+ StoreAllPartitionTimeline ttd2StoreAll =
+ new StoreAllPartitionTimeline(MetastoreUtil.getFactStorageTableName(fact.getName(), c4), HOURLY,
+ "ttd2");
while (!(temp.after(NOW))) {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put("ttd", temp);
timeParts.put("ttd2", temp);
- partitions.add(UpdatePeriod.HOURLY.format().format(temp));
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ TimePartition tp = TimePartition.of(HOURLY, temp);
+ ttdStoreAll.add(tp);
+ ttd2StoreAll.add(tp);
+ partitions.add(HOURLY.format().format(temp));
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
storagePartitionDescs.add(sPartSpec);
cal.add(Calendar.HOUR_OF_DAY, 1);
temp = cal.getTime();
@@ -1071,49 +1085,15 @@ public class CubeTestSetup {
client.clearHiveTableCache();
table = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(),
Storage.getPrefix(c4)));
- Assert.assertEquals(table.getParameters().get(MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
- Assert.assertEquals(table.getParameters().get(MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.DAILY,
- "ttd")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.DAILY, "ttd2")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY, "ttd")),
- StoreAllPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.HOURLY, "ttd2")),
- StoreAllPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.MINUTELY, "ttd")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.MINUTELY, "ttd2")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.MONTHLY, "ttd")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.MONTHLY, "ttd2")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.QUARTERLY, "ttd")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.QUARTERLY, "ttd2")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.YEARLY, "ttd")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(table.getParameters().get(
- MetastoreUtil.getPartitionTimelineStorageClassKey(UpdatePeriod.YEARLY, "ttd2")),
- EndsAndHolesPartitionTimeline.class.getCanonicalName());
- Assert.assertEquals(MetastoreUtil.getNamedStringValue(table.getParameters(),
- MetastoreUtil.getPartitionInfoKeyPrefix(UpdatePeriod.HOURLY, "ttd") + "partitions"),
- StringUtils.join(partitions, ","));
- Assert.assertEquals(MetastoreUtil.getNamedStringValue(table.getParameters(),
- MetastoreUtil.getPartitionInfoKeyPrefix(UpdatePeriod.HOURLY, "ttd2") + "partitions"),
- StringUtils.join(partitions, ","));
+ assertEquals(table.getParameters().get(MetastoreUtil.getPartitionTimelineCachePresenceKey()), "true");
+ for(UpdatePeriod period: Lists.newArrayList(DAILY, MINUTELY, MONTHLY, YEARLY, QUARTERLY)) {
+ for(String partCol: Lists.newArrayList("ttd", "ttd2")) {
+ assertTimeline(client, fact.getName(), c4, period, partCol, EndsAndHolesPartitionTimeline.class);
+ }
+ }
+ assertTimeline(client, fact.getName(), c4, HOURLY, "ttd", ttdStoreAll);
+ assertTimeline(client, fact.getName(), c4, HOURLY, "ttd2", ttd2StoreAll);
+
// Add all hourly partitions for TWO_DAYS_RANGE_BEFORE_4_DAYS
cal.setTime(BEFORE_4_DAYS_START);
temp = cal.getTime();
@@ -1121,12 +1101,40 @@ public class CubeTestSetup {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put("ttd", temp);
timeParts.put("ttd2", temp);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
client.addPartition(sPartSpec, c4);
cal.add(Calendar.HOUR_OF_DAY, 1);
temp = cal.getTime();
}
}
+ private void assertTimeline(CubeMetastoreClient client, String factName, String storageName,
+ UpdatePeriod updatePeriod, String timeDim, PartitionTimeline expectedTimeline)
+ throws Exception {
+ assertNotNull(factName);
+ assertNotNull(storageName);
+ assertNotNull(updatePeriod);
+ assertNotNull(timeDim);
+ String storageTableName = MetastoreUtil.getFactStorageTableName(factName, storageName);
+ List<PartitionTimeline> timelines = client.getTimelines(factName, storageName, updatePeriod.name(), timeDim);
+ assertEquals(timelines.size(), 1);
+ PartitionTimeline actualTimeline = timelines.get(0);
+ assertEquals(actualTimeline, expectedTimeline);
+ assertEquals(client.getTable(storageTableName).getParameters()
+ .get(MetastoreUtil.getPartitionTimelineStorageClassKey(updatePeriod,
+ timeDim)), expectedTimeline.getClass().getCanonicalName());
+ expectedTimeline.init(client.getTable(MetastoreUtil.getFactStorageTableName(factName, storageName)));
+ assertEquals(actualTimeline, expectedTimeline);
+ }
+
+ private void assertTimeline(CubeMetastoreClient client, String factName, String storageName,
+ UpdatePeriod updatePeriod, String timeDim, Class<? extends PartitionTimeline> partitionTimelineClass)
+ throws Exception {
+ String storageTableName = MetastoreUtil.getFactStorageTableName(factName, storageName);
+ PartitionTimeline expectedTimeline = partitionTimelineClass.getConstructor(
+ String.class, UpdatePeriod.class, String.class)
+ .newInstance(storageTableName, updatePeriod, timeDim);
+ assertTimeline(client, factName, storageName, updatePeriod, timeDim, expectedTimeline);
+ }
private void createCubeCheapFact(CubeMetastoreClient client) throws HiveException, LensException {
String factName = "cheapFact";
@@ -1144,12 +1152,12 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.MINUTELY);
- updates.add(UpdatePeriod.HOURLY);
- updates.add(UpdatePeriod.DAILY);
- updates.add(UpdatePeriod.MONTHLY);
- updates.add(UpdatePeriod.QUARTERLY);
- updates.add(UpdatePeriod.YEARLY);
+ updates.add(MINUTELY);
+ updates.add(HOURLY);
+ updates.add(DAILY);
+ updates.add(MONTHLY);
+ updates.add(QUARTERLY);
+ updates.add(YEARLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
@@ -1188,7 +1196,7 @@ public class CubeTestSetup {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put("ttd", temp);
timeParts.put("ttd2", temp);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
client.addPartition(sPartSpec, c99);
cal.add(Calendar.HOUR_OF_DAY, 1);
temp = cal.getTime();
@@ -1201,7 +1209,7 @@ public class CubeTestSetup {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put("ttd", temp);
timeParts.put("ttd2", temp);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
client.addPartition(sPartSpec, c99);
cal.add(Calendar.HOUR_OF_DAY, 1);
temp = cal.getTime();
@@ -1220,7 +1228,7 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.WEEKLY);
+ updates.add(WEEKLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
partCols.add(TestCubeMetastoreClient.getDatePartition());
@@ -1255,7 +1263,7 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.HOURLY);
+ updates.add(HOURLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
partCols.add(TestCubeMetastoreClient.getDatePartition());
@@ -1283,7 +1291,7 @@ public class CubeTestSetup {
while (!(temp.after(NOW))) {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), temp);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact2.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact2.getName(), timeParts, null, HOURLY);
try {
client.addPartition(sPartSpec, c1);
} catch (HiveException e) {
@@ -1301,7 +1309,7 @@ public class CubeTestSetup {
while (!(temp.after(BEFORE_4_DAYS_END))) {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), temp);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact2.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact2.getName(), timeParts, null, HOURLY);
client.addPartition(sPartSpec, c1);
cal.add(Calendar.HOUR_OF_DAY, 1);
temp = cal.getTime();
@@ -1326,7 +1334,7 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.HOURLY);
+ updates.add(HOURLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
partCols.add(TestCubeMetastoreClient.getDatePartition());
@@ -1360,7 +1368,7 @@ public class CubeTestSetup {
while (!(temp.after(NOW))) {
Map<String, Date> timeParts = new HashMap<String, Date>();
timeParts.put(TestCubeMetastoreClient.getDatePartitionKey(), temp);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact2.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact2.getName(), timeParts, null, HOURLY);
client.addPartition(sPartSpec, c3);
cal.add(Calendar.HOUR_OF_DAY, 1);
temp = cal.getTime();
@@ -1379,7 +1387,7 @@ public class CubeTestSetup {
Map<String, Set<UpdatePeriod>> storageAggregatePeriods = new HashMap<String, Set<UpdatePeriod>>();
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.MONTHLY);
+ updates.add(MONTHLY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
partCols.add(TestCubeMetastoreClient.getDatePartition());
@@ -1446,7 +1454,7 @@ public class CubeTestSetup {
Map<String, String> tblPros = Maps.newHashMap();
tblPros.put(LensConfConstants.STORAGE_COST, "100");
s1.setTblProps(tblPros);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1549,7 +1557,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1570,7 +1578,7 @@ public class CubeTestSetup {
dimColumns.add(new FieldSchema("name", "string", "field1"));
dimColumns.add(new FieldSchema("cityId", "string", "f-key to cityDim"));
storageTables.put(c3, s1);
- dumpPeriods.put(c3, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c3, HOURLY);
client.createCubeDimensionTable(dimName, dimTblName, dimColumns, 10L, dumpPeriods, dimProps, storageTables);
@@ -1613,7 +1621,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c3, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c3, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1673,7 +1681,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1714,7 +1722,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1761,7 +1769,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1807,7 +1815,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
StorageTableDesc s2 = new StorageTableDesc();
s2.setInputFormat(TextInputFormat.class.getCanonicalName());
@@ -1849,7 +1857,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
storageTables.put(c1, s1);
@@ -1899,7 +1907,7 @@ public class CubeTestSetup {
partCols.add(dimColumns.remove(dimColumns.size() - 2));
s2.setPartCols(partCols);
dumpPeriods.clear();
- dumpPeriods.put(c3, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c3, HOURLY);
storageTables.clear();
storageTables.put(c3, s2);
dimProps.put(MetastoreUtil.getDimTablePartsKey(dimTblName), partCols.get(0).getName());
@@ -1937,7 +1945,7 @@ public class CubeTestSetup {
s1.setOutputFormat(HiveIgnoreKeyTextOutputFormat.class.getCanonicalName());
s1.setPartCols(partCols);
s1.setTimePartCols(timePartCols);
- dumpPeriods.put(c1, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c1, HOURLY);
Map<String, StorageTableDesc> storageTables = new HashMap<String, StorageTableDesc>();
storageTables.put(c1, s1);
@@ -1953,7 +1961,7 @@ public class CubeTestSetup {
s2.setPartCols(partCols);
s2.setTimePartCols(timePartCols);
dumpPeriods.clear();
- dumpPeriods.put(c3, UpdatePeriod.HOURLY);
+ dumpPeriods.put(c3, HOURLY);
storageTables.clear();
storageTables.put(c3, s2);
dimProps.put(MetastoreUtil.getDimTablePartsKey(dimTblName), partCols.get(1).getName());
@@ -2009,7 +2017,7 @@ public class CubeTestSetup {
metastore.dropDatabase(dbName, true, true, true);
}
- private void createCubeFactsWithValidColumns(CubeMetastoreClient client) throws HiveException, LensException {
+ private void createCubeFactsWithValidColumns(CubeMetastoreClient client) throws Exception {
String factName = "summary1";
StringBuilder commonCols = new StringBuilder();
List<FieldSchema> factColumns = new ArrayList<FieldSchema>(cubeMeasures.size());
@@ -2027,9 +2035,9 @@ public class CubeTestSetup {
factColumns.add(new FieldSchema("zipcode", "int", "zip"));
factColumns.add(new FieldSchema("cityid", "int", "city id"));
Set<UpdatePeriod> updates = new HashSet<UpdatePeriod>();
- updates.add(UpdatePeriod.MINUTELY);
- updates.add(UpdatePeriod.HOURLY);
- updates.add(UpdatePeriod.DAILY);
+ updates.add(MINUTELY);
+ updates.add(HOURLY);
+ updates.add(DAILY);
ArrayList<FieldSchema> partCols = new ArrayList<FieldSchema>();
List<String> timePartCols = new ArrayList<String>();
@@ -2106,7 +2114,7 @@ public class CubeTestSetup {
}
private void createPIEParts(CubeMetastoreClient client, CubeFactTable fact, String storageName)
- throws HiveException, LensException {
+ throws Exception {
// Add partitions in PIE storage
Calendar pcal = Calendar.getInstance();
pcal.setTime(TWODAYS_BACK);
@@ -2114,6 +2122,17 @@ public class CubeTestSetup {
Calendar ical = Calendar.getInstance();
ical.setTime(TWODAYS_BACK);
ical.set(Calendar.HOUR, 0);
+
+ Map<UpdatePeriod, TreeSet<Date>> pTimes = Maps.newHashMap();
+ pTimes.put(DAILY, Sets.<Date>newTreeSet());
+ pTimes.put(HOURLY, Sets.<Date>newTreeSet());
+ Map<UpdatePeriod, TreeSet<Date>> iTimes = Maps.newHashMap();
+ iTimes.put(DAILY, Sets.<Date>newTreeSet());
+ iTimes.put(HOURLY, Sets.<Date>newTreeSet());
+ Map<String, Map<UpdatePeriod, TreeSet<Date>>> times = Maps.newHashMap();
+ times.put("et", iTimes);
+ times.put("it", iTimes);
+ times.put("pt", pTimes);
// pt=day1 and it=day1
// pt=day2-hour[0-3] it = day1-hour[20-23]
// pt=day2 and it=day1
@@ -2129,7 +2148,9 @@ public class CubeTestSetup {
timeParts.put("pt", ptime);
timeParts.put("it", itime);
timeParts.put("et", itime);
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.DAILY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, DAILY);
+ pTimes.get(DAILY).add(ptime);
+ iTimes.get(DAILY).add(itime);
client.addPartition(sPartSpec, storageName);
pcal.add(Calendar.DAY_OF_MONTH, 1);
ical.add(Calendar.HOUR_OF_DAY, 20);
@@ -2144,7 +2165,9 @@ public class CubeTestSetup {
timeParts.put("it", itime);
timeParts.put("et", itime);
// pt=day2 and it=day1
- StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.DAILY);
+ StoragePartitionDesc sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, DAILY);
+ pTimes.get(DAILY).add(ptime);
+ iTimes.get(DAILY).add(itime);
client.addPartition(sPartSpec, storageName);
// pt=day2-hour[0-3] it = day1-hour[20-23]
// pt=day2-hour[4-23] it = day2-hour[0-19]
@@ -2154,13 +2177,17 @@ public class CubeTestSetup {
timeParts.put("pt", ptime);
timeParts.put("it", itime);
timeParts.put("et", itime);
- sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
+ pTimes.get(HOURLY).add(ptime);
+ iTimes.get(HOURLY).add(itime);
client.addPartition(sPartSpec, storageName);
pcal.add(Calendar.HOUR_OF_DAY, 1);
ical.add(Calendar.HOUR_OF_DAY, 1);
}
// pt=day2 and it=day2
- sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.DAILY);
+ sPartSpec = new StoragePartitionDesc(fact.getName(), timeParts, null, DAILY);
+ pTimes.get(DAILY).add(ptime);
+ iTimes.get(DAILY).add(itime);
client.addPartition(sPartSpec, storageName);
} else if (p == 3) { // day3
// pt=day3-hour[0-3] it = day2-hour[20-23]
@@ -2172,44 +2199,27 @@ public class CubeTestSetup {
timeParts.put("it", itime);
timeParts.put("et", itime);
StoragePartitionDesc sPartSpec =
- new StoragePartitionDesc(fact.getName(), timeParts, null, UpdatePeriod.HOURLY);
+ new StoragePartitionDesc(fact.getName(), timeParts, null, HOURLY);
+ pTimes.get(HOURLY).add(ptime);
+ iTimes.get(HOURLY).add(itime);
client.addPartition(sPartSpec, storageName);
pcal.add(Calendar.HOUR_OF_DAY, 1);
ical.add(Calendar.HOUR_OF_DAY, 1);
}
}
}
- Map<String, String> params = client.getTable(MetastoreUtil.getStorageTableName(fact.getName(), Storage.getPrefix(
- storageName))).getParameters();
+ String storageTableName = MetastoreUtil.getStorageTableName(fact.getName(), Storage.getPrefix(
+ storageName));
+ Map<String, String> params = client.getTable(storageTableName).getParameters();
String prefix = MetastoreConstants.STORAGE_PFX + MetastoreConstants.PARTITION_TIMELINE_CACHE;
- Assert.assertEquals(params.get(prefix + "present"), "true");
- for (UpdatePeriod up : Arrays.asList(UpdatePeriod.DAILY, UpdatePeriod.HOURLY)) {
- for (String p : Arrays.asList("et", "it", "pt")) {
- String first = params.get(prefix + up + "." + p + "." + "first");
- String latest = params.get(prefix + up + "." + p + "." + "latest");
- String holes = MetastoreUtil.getNamedStringValue(params, prefix + up + "." + p + "." + "holes");
- String storageClass = params.get(prefix + up + "." + p + "." + "storage.class");
- Assert.assertNotNull(first);
- Assert.assertNotNull(latest);
- Assert.assertEquals(holes, "");
- Assert.assertEquals(storageClass, EndsAndHolesPartitionTimeline.class.getCanonicalName());
- try {
- up.format().parse(first);
- up.format().parse(latest);
- } catch (java.text.ParseException e) {
- Assert.fail("parse failed. first/latest not updated correctly in table");
- }
- }
- for (String p : Arrays.asList("et", "it", "pt")) {
- up = UpdatePeriod.MINUTELY;
- String first = params.get(prefix + up + "." + p + "." + "first");
- String latest = params.get(prefix + up + "." + p + "." + "latest");
- String holes = MetastoreUtil.getNamedStringValue(params, prefix + up + "." + p + "." + "holes");
- String storageClass = params.get(prefix + up + "." + p + "." + "storage.class");
- Assert.assertEquals(first, "");
- Assert.assertEquals(latest, "");
- Assert.assertEquals(holes, "");
- Assert.assertEquals(storageClass, EndsAndHolesPartitionTimeline.class.getCanonicalName());
+ assertEquals(params.get(prefix + "present"), "true");
+ for (String p : Arrays.asList("et", "it", "pt")) {
+ assertTimeline(client, fact.getName(), storageName, MINUTELY, p, EndsAndHolesPartitionTimeline.class);
+ for (UpdatePeriod up : Arrays.asList(DAILY, HOURLY)) {
+ EndsAndHolesPartitionTimeline timeline = new EndsAndHolesPartitionTimeline(storageTableName, up, p);
+ timeline.setFirst(TimePartition.of(up, times.get(p).get(up).first()));
+ timeline.setLatest(TimePartition.of(up, times.get(p).get(up).last()));
+ assertTimeline(client, fact.getName(), storageName, up, p, timeline);
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java b/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
index cde85f6..890da4c 100644
--- a/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
+++ b/lens-server-api/src/main/java/org/apache/lens/server/api/metastore/CubeMetastoreService.java
@@ -551,4 +551,6 @@ public interface CubeMetastoreService {
Date getLatestDateOfCube(LensSessionHandle sessionid, String cubeName, String timeDimension)
throws LensException, HiveException;
+ List<String> getPartitionTimelines(LensSessionHandle sessionid, String factName, String storage,
+ String updatePeriod, String timeDimension) throws LensException, HiveException;
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
index b0f61b6..4597614 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/CubeMetastoreServiceImpl.java
@@ -26,6 +26,7 @@ import javax.ws.rs.NotFoundException;
import org.apache.lens.api.LensSessionHandle;
import org.apache.lens.api.metastore.*;
import org.apache.lens.cube.metadata.*;
+import org.apache.lens.cube.metadata.timeline.PartitionTimeline;
import org.apache.lens.server.LensService;
import org.apache.lens.server.api.error.LensException;
import org.apache.lens.server.api.metastore.CubeMetastoreService;
@@ -44,6 +45,8 @@ import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.apache.thrift.TException;
+import com.google.common.collect.Lists;
+
public class CubeMetastoreServiceImpl extends LensService implements CubeMetastoreService {
public static final Logger LOG = LogManager.getLogger(CubeMetastoreServiceImpl.class);
@@ -1357,4 +1360,14 @@ public class CubeMetastoreServiceImpl extends LensService implements CubeMetasto
release(sessionid);
return latest;
}
+
+ public List<String> getPartitionTimelines(LensSessionHandle sessionid, String factName, String storage,
+ String updatePeriod, String timeDimension) throws LensException, HiveException {
+ CubeMetastoreClient client = getClient(sessionid);
+ List<String> ret = Lists.newArrayList();
+ for (PartitionTimeline timeline : client.getTimelines(factName, storage, updatePeriod, timeDimension)) {
+ ret.add(timeline.toString());
+ }
+ return ret;
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
index 73e8750..e746ebb 100644
--- a/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
+++ b/lens-server/src/main/java/org/apache/lens/server/metastore/MetastoreResource.java
@@ -1552,4 +1552,31 @@ public class MetastoreResource {
throw new WebApplicationException(exc);
}
}
+
+ /**
+ * Get the partition timelines.
+ *
+ * @param sessionid The sessionid in which user is working
+ * @param factName name of the fact
+ * @param storage storage Name
+ * @param updatePeriod update period
+ * @param timeDimension time dimension name
+ * @return List of partition timelines.
+ */
+ @GET
+ @Path("/facts/{factName}/timelines")
+ public StringList getPartitionTimelines(@QueryParam("sessionid") LensSessionHandle sessionid,
+ @PathParam("factName") String factName, @QueryParam("storage") String storage,
+ @QueryParam("updatePeriod") String updatePeriod, @QueryParam("timeDimension") String timeDimension)
+ throws LensException, HiveException {
+ checkSessionId(sessionid);
+ try {
+ return new StringList(getSvc().getPartitionTimelines(sessionid, factName, storage,
+ updatePeriod, timeDimension));
+ } catch (LensException exc) {
+ checkTableNotFound(exc, factName);
+ LOG.error("Error finding partition timelines for fact: " + factName);
+ throw exc;
+ }
+ }
}
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
index 859e9cc..aab9771 100644
--- a/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/LensTestUtil.java
@@ -255,7 +255,6 @@ public final class LensTestUtil {
Hive hive = Hive.get(conf);
File testJarFile = new File("testdata/test.jar");
File serdeJarFile = new File("testdata/serde.jar");
-
for (String db : testDatabases) {
Database database = new Database();
database.setName(db);
http://git-wip-us.apache.org/repos/asf/incubator-lens/blob/e0b0c4c5/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index 8ecf2a8..dc243aa 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -723,15 +723,16 @@ public class TestQueryService extends LensJerseyTest {
APIResult result = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request()
.delete(APIResult.class);
// cancel would fail query is already successful
- Assert.assertTrue(result.getStatus().equals(APIResult.Status.SUCCEEDED)
- || result.getStatus().equals(APIResult.Status.FAILED));
-
LensQuery ctx2 = target.path(handle2.toString()).queryParam("sessionid", lensSessionId).request()
.get(LensQuery.class);
if (result.getStatus().equals(APIResult.Status.FAILED)) {
- Assert.assertTrue(ctx2.getStatus().getStatus() == QueryStatus.Status.SUCCESSFUL);
+ Assert.assertEquals(ctx2.getStatus().getStatus(), QueryStatus.Status.SUCCESSFUL,
+ "cancel failed without query having been succeeded");
+ } else if (result.getStatus().equals(APIResult.Status.SUCCEEDED)) {
+ Assert.assertEquals(ctx2.getStatus().getStatus(), QueryStatus.Status.CANCELED,
+ "cancel succeeded but query wasn't cancelled");
} else {
- Assert.assertTrue(ctx2.getStatus().getStatus() == QueryStatus.Status.CANCELED);
+ Assert.fail("unexpected cancel status: " + result.getStatus());
}
// Test http download end point