Posted to commits@carbondata.apache.org by ra...@apache.org on 2018/09/14 13:37:24 UTC

carbondata git commit: [CARBONDATA-2916] Add CarbonCli tool for data summary

Repository: carbondata
Updated Branches:
  refs/heads/master 055b7a784 -> c40d85478


[CARBONDATA-2916] Add CarbonCli tool for data summary

A tool is added that prints information about a given data folder

usage: CarbonCli
 -a,--all                    print all information
 -b,--blocklet               print blocklet size detail
 -c,--column <column name>   column to print statistics
 -cmd <command name>         command to execute, supported commands are:
                             summary
 -h,--help                   print this message
 -m,--showSegment            print segment information
 -p,--path <path>            the path which contains carbondata files,
                             nested folders are supported
 -s,--schema                 print the schema
 -t,--tblProperties          print table properties
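
For example, assuming the shaded jar built by the new tools/cli module
(target/carbondata-cli.jar, produced by the maven-shade-plugin configuration
below), a summary with all details can be printed with:

 java -jar carbondata-cli.jar -cmd summary -p /path/to/carbon/files -a

The jar location and input path are illustrative; -cmd is the only mandatory
option, and the summary command additionally requires -p.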

This closes #2683


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/c40d8547
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/c40d8547
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/c40d8547

Branch: refs/heads/master
Commit: c40d854783acf364edcf36fad2b2a9b6d00bdedf
Parents: 055b7a7
Author: Jacky Li <ja...@qq.com>
Authored: Mon Sep 10 22:50:01 2018 +0800
Committer: ravipesala <ra...@gmail.com>
Committed: Fri Sep 14 19:07:14 2018 +0530

----------------------------------------------------------------------
 .../chunk/impl/DimensionRawColumnChunk.java     |   2 +-
 .../apache/carbondata/core/util/ByteUtil.java   |   7 +
 .../apache/carbondata/core/util/CarbonUtil.java |   1 +
 .../core/util/path/CarbonTablePath.java         |   2 +-
 .../dataload/TestLoadDataWithCompression.scala  |   2 +-
 pom.xml                                         |   6 +
 .../loading/model/CarbonLoadModelBuilder.java   |   4 +-
 .../sdk/file/CarbonWriterBuilder.java           |  19 +-
 .../apache/carbondata/sdk/file/TestUtil.java    | 215 +++++++++
 .../apache/carbondata/sdk/file/TestUtil.java    | 209 ---------
 tools/cli/pom.xml                               |  93 ++++
 .../org/apache/carbondata/tool/CarbonCli.java   | 157 +++++++
 .../org/apache/carbondata/tool/DataFile.java    | 432 +++++++++++++++++++
 .../org/apache/carbondata/tool/DataSummary.java | 360 ++++++++++++++++
 .../apache/carbondata/tool/ShardPrinter.java    |  49 +++
 .../apache/carbondata/tool/TablePrinter.java    |  59 +++
 .../apache/carbondata/tool/CarbonCliTest.java   | 199 +++++++++
 17 files changed, 1593 insertions(+), 223 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
index 8791cea..7b1aca1 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
@@ -166,7 +166,7 @@ public class DimensionRawColumnChunk extends AbstractRawColumnChunk {
    * @throws IOException
    * @throws MemoryException
    */
-  private CarbonDictionary getDictionary(LocalDictionaryChunk localDictionaryChunk,
+  public static CarbonDictionary getDictionary(LocalDictionaryChunk localDictionaryChunk,
       Compressor compressor) throws IOException, MemoryException {
     if (null != localDictionaryChunk) {
       List<Encoding> encodings = localDictionaryChunk.getDictionary_meta().getEncoders();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java b/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
index 4efd5ae..702aded 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/ByteUtil.java
@@ -195,6 +195,13 @@ public final class ByteUtil {
       return length1 - length2;
     }
 
+    /**
+     * Return a negative value if {@code buffer1} is less than {@code buffer2},
+     * zero if they are equal, and a positive value otherwise.
+     * @param buffer1 value to compare
+     * @param buffer2 value to compare
+     * @return compare result
+     */
     public int compareTo(byte[] buffer1, byte[] buffer2) {
 
       // Short circuit equal case
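
For context, the comparator documented above is what the new DataFile.java
(added later in this commit) uses for min/max index comparisons, via the
UnsafeComparer singleton. A minimal usage sketch under that assumption (the
class name CompareToSketch is illustrative, not part of this commit):

  import org.apache.carbondata.core.util.ByteUtil;

  public class CompareToSketch {
    public static void main(String[] args) {
      byte[] a = new byte[]{1, 2};
      byte[] b = new byte[]{1, 3};
      // negative: a sorts before b; zero: equal; positive: a sorts after b
      int cmp = ByteUtil.UnsafeComparer.INSTANCE.compareTo(a, b);
      System.out.println(cmp < 0); // prints true
    }
  }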

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index b3af060..dc03944 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -2134,6 +2134,7 @@ public final class CarbonUtil {
         wrapperColumnSchema.setSortColumn(true);
       }
     }
+    wrapperColumnSchema.setColumnProperties(properties);
     wrapperColumnSchema.setFunction(externalColumnSchema.getAggregate_function());
     List<org.apache.carbondata.format.ParentColumnTableRelation> parentColumnTableRelation =
         externalColumnSchema.getParentColumnTableRelations();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
index 6493e34..f1df66a 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
@@ -34,7 +34,7 @@ public class CarbonTablePath {
   private static final String DICTIONARY_EXT = ".dict";
   private static final String DICTIONARY_META_EXT = ".dictmeta";
   private static final String SORT_INDEX_EXT = ".sortindex";
-  private static final String SCHEMA_FILE = "schema";
+  public static final String SCHEMA_FILE = "schema";
   private static final String FACT_DIR = "Fact";
   private static final String SEGMENT_PREFIX = "Segment_";
   private static final String PARTITION_PREFIX = "Part";

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
index 628a0dc..21fbfc1 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/dataload/TestLoadDataWithCompression.scala
@@ -344,7 +344,7 @@ class TestLoadDataWithCompression extends QueryTest with BeforeAndAfterEach with
       .csv(csvDir)
   }
 
-  test("test streaming ingestion with different compressor for each mini-batch") {
+  ignore("test streaming ingestion with different compressor for each mini-batch") {
     createTable(streaming = true)
     val carbonTable = CarbonEnv.getCarbonTable(Some("default"), tableName)(sqlContext.sparkSession)
     val lineNum = 10

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index a6679fc..12a5881 100644
--- a/pom.xml
+++ b/pom.xml
@@ -712,6 +712,12 @@
         <module>datamap/mv/core</module>
       </modules>
     </profile>
+    <profile>
+      <id>tools</id>
+      <modules>
+        <module>tools/cli</module>
+      </modules>
+    </profile>
   </profiles>
 
 </project>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
index bcc904c..ddd54a4 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/loading/model/CarbonLoadModelBuilder.java
@@ -65,7 +65,7 @@ public class CarbonLoadModelBuilder {
    * @param taskNo
    * @return a new CarbonLoadModel instance
    */
-  public CarbonLoadModel build(Map<String, String>  options, long UUID, String taskNo)
+  public CarbonLoadModel build(Map<String, String>  options, long timestamp, String taskNo)
       throws InvalidLoadOptionException, IOException {
     Map<String, String> optionsFinal = LoadOption.fillOptionWithDefaultValue(options);
 
@@ -82,7 +82,7 @@ public class CarbonLoadModelBuilder {
         Maps.getOrDefault(options, "sort_scope", CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT));
     CarbonLoadModel model = new CarbonLoadModel();
     model.setCarbonTransactionalTable(table.isTransactionalTable());
-    model.setFactTimeStamp(UUID);
+    model.setFactTimeStamp(timestamp);
     model.setTaskNo(taskNo);
 
     // we have provided 'fileheader', so it hadoopConf can be null

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
index 28a0dde..5f3e7b8 100644
--- a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/CarbonWriterBuilder.java
@@ -67,7 +67,7 @@ public class CarbonWriterBuilder {
   private int blockletSize;
   private int blockSize;
   private boolean isTransactionalTable;
-  private long UUID;
+  private long timestamp;
   private Map<String, String> options;
   private String taskNo;
   private int localDictionaryThreshold;
@@ -212,13 +212,13 @@ public class CarbonWriterBuilder {
 
   /**
    * to set the timestamp in the carbondata and carbonindex index files
-   * @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
+   * @param timestamp is a timestamp to be used in the carbondata and carbonindex index files.
    * By default set to zero.
    * @return updated CarbonWriterBuilder
    */
-  public CarbonWriterBuilder uniqueIdentifier(long UUID) {
-    Objects.requireNonNull(UUID, "Unique Identifier should not be null");
-    this.UUID = UUID;
+  public CarbonWriterBuilder uniqueIdentifier(long timestamp) {
+    Objects.requireNonNull(timestamp, "Unique Identifier should not be null");
+    this.timestamp = timestamp;
     return this;
   }
 
@@ -538,6 +538,7 @@ public class CarbonWriterBuilder {
 
   public CarbonLoadModel buildLoadModel(Schema carbonSchema)
       throws IOException, InvalidLoadOptionException {
+    timestamp = System.nanoTime();
     Set<String> longStringColumns = null;
     if (options != null && options.get("long_string_columns") != null) {
       longStringColumns =
@@ -552,7 +553,7 @@ public class CarbonWriterBuilder {
       persistSchemaFile(table, CarbonTablePath.getSchemaFilePath(path));
     }
     // build LoadModel
-    return buildLoadModel(table, UUID, taskNo, options);
+    return buildLoadModel(table, timestamp, taskNo, options);
   }
 
   private void validateLongStringColumns(Schema carbonSchema, Set<String> longStringColumns) {
@@ -624,7 +625,7 @@ public class CarbonWriterBuilder {
       dbName = "_tempDB";
     } else {
       dbName = "";
-      tableName = "_tempTable_" + String.valueOf(UUID);
+      tableName = "_tempTable_" + String.valueOf(timestamp);
     }
     TableSchema schema = tableSchemaBuilder.build();
     schema.setTableName(tableName);
@@ -743,13 +744,13 @@ public class CarbonWriterBuilder {
   /**
    * Build a {@link CarbonLoadModel}
    */
-  private CarbonLoadModel buildLoadModel(CarbonTable table, long UUID, String taskNo,
+  private CarbonLoadModel buildLoadModel(CarbonTable table, long timestamp, String taskNo,
       Map<String, String> options) throws InvalidLoadOptionException, IOException {
     if (options == null) {
       options = new HashMap<>();
     }
     CarbonLoadModelBuilder builder = new CarbonLoadModelBuilder(table);
-    CarbonLoadModel build = builder.build(options, UUID, taskNo);
+    CarbonLoadModel build = builder.build(options, timestamp, taskNo);
     setCsvHeader(build);
     return build;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/store/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java
new file mode 100644
index 0000000..f4c2408
--- /dev/null
+++ b/store/sdk/src/main/java/org/apache/carbondata/sdk/file/TestUtil.java
@@ -0,0 +1,215 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.sdk.file;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.DataInputStream;
+import java.io.File;
+import java.io.FileFilter;
+import java.io.IOException;
+import java.io.InputStream;
+
+import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+
+import org.apache.avro.file.DataFileWriter;
+import org.apache.avro.generic.GenericData;
+import org.apache.avro.generic.GenericDatumReader;
+import org.apache.avro.generic.GenericDatumWriter;
+import org.apache.avro.io.DecoderFactory;
+import org.apache.avro.io.JsonDecoder;
+import org.apache.hadoop.conf.Configuration;
+
+@InterfaceAudience.Developer("Test")
+public class TestUtil {
+
+  public static final Configuration configuration = new Configuration();
+
+  public static GenericData.Record jsonToAvro(String json, String avroSchema) throws IOException {
+    InputStream input = null;
+    DataFileWriter writer = null;
+    ByteArrayOutputStream output = null;
+    try {
+      org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(avroSchema);
+      GenericDatumReader reader = new GenericDatumReader(schema);
+      input = new ByteArrayInputStream(json.getBytes(CarbonCommonConstants.DEFAULT_CHARSET));
+      output = new ByteArrayOutputStream();
+      DataInputStream din = new DataInputStream(input);
+      writer = new DataFileWriter(new GenericDatumWriter());
+      writer.create(schema, output);
+      JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
+      return (GenericData.Record) reader.read(null, decoder);
+    } finally {
+      if (input != null) {
+        input.close();
+      }
+      if (writer != null) {
+        writer.close();
+      }
+    }
+  }
+
+  static void writeFilesAndVerify(Schema schema, String path) {
+    writeFilesAndVerify(schema, path, null);
+  }
+
+  static void writeFilesAndVerify(Schema schema, String path, String[] sortColumns) {
+    writeFilesAndVerify(
+        100, schema, path, sortColumns, false, -1, -1, true);
+  }
+
+  public static void writeFilesAndVerify(
+      int rows, Schema schema, String path, boolean persistSchema) {
+    writeFilesAndVerify(
+        rows, schema, path, null, persistSchema, -1, -1, true);
+  }
+
+  public static void writeFilesAndVerify(Schema schema, String path, boolean persistSchema,
+      boolean isTransactionalTable) {
+    writeFilesAndVerify(
+        100, schema, path, null, persistSchema, -1, -1, isTransactionalTable);
+  }
+
+  /**
+   * write file and verify
+   *
+   * @param rows                 number of rows
+   * @param schema               schema
+   * @param path                 table store path
+   * @param persistSchema        whether persist schema
+   * @param isTransactionalTable whether is transactional table
+   */
+  public static void writeFilesAndVerify(
+      int rows, Schema schema, String path, boolean persistSchema, boolean isTransactionalTable) {
+    writeFilesAndVerify(rows, schema, path, null, persistSchema, -1, -1, isTransactionalTable);
+  }
+
+  /**
+   * Invoke CarbonWriter API to write carbon files and assert that the files are written successfully
+   * @param rows number of rows to write
+   * @param schema schema of the file
+   * @param path local write path
+   * @param sortColumns sort columns
+   * @param persistSchema true if want to persist schema file
+   * @param blockletSize blockletSize in the file, -1 for default size
+   * @param blockSize blockSize in the file, -1 for default size
+   * @param isTransactionalTable set to true if this is written for Transactional Table.
+   */
+  public static void writeFilesAndVerify(int rows, Schema schema, String path, String[] sortColumns,
+      boolean persistSchema, int blockletSize, int blockSize, boolean isTransactionalTable) {
+    try {
+      CarbonWriterBuilder builder = CarbonWriter.builder()
+          .isTransactionalTable(isTransactionalTable)
+          .outputPath(path);
+      if (sortColumns != null) {
+        builder = builder.sortBy(sortColumns);
+      }
+      if (persistSchema) {
+        builder = builder.persistSchemaFile(true);
+      }
+      if (blockletSize != -1) {
+        builder = builder.withBlockletSize(blockletSize);
+      }
+      if (blockSize != -1) {
+        builder = builder.withBlockSize(blockSize);
+      }
+
+      CarbonWriter writer = builder.buildWriterForCSVInput(schema, configuration);
+
+      for (int i = 0; i < rows; i++) {
+        writer.write(new String[]{
+            "robot" + (i % 10), String.valueOf(i % 3000000), String.valueOf((double) i / 2)});
+      }
+      writer.close();
+    } catch (Exception e) {
+      e.printStackTrace();
+      throw new RuntimeException(e);
+    }
+
+    File segmentFolder = null;
+    if (isTransactionalTable) {
+      segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
+      if (!segmentFolder.exists()) {
+        throw new RuntimeException("Test failed: segment folder does not exist");
+      }
+    } else {
+      segmentFolder = new File(path);
+      if (!segmentFolder.exists()) {
+        throw new RuntimeException("Test failed: segment folder does not exist");
+      }
+    }
+
+    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
+      @Override public boolean accept(File pathname) {
+        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+    if (dataFiles == null) {
+      throw new RuntimeException("Test failed: dataFiles is null");
+    }
+
+    if (dataFiles.length == 0) {
+      throw new RuntimeException("Test failed: dataFiles is empty");
+    }
+  }
+
+  /**
+   * delete the mdt file if it exists
+   * return true if the file is deleted successfully or does not exist; otherwise return false
+   *
+   * @return boolean
+   */
+  public static boolean cleanMdtFile() {
+    String fileName = CarbonProperties.getInstance().getSystemFolderLocation()
+            + CarbonCommonConstants.FILE_SEPARATOR + "datamap.mdtfile";
+    try {
+      if (FileFactory.isFileExist(fileName)) {
+        File file = new File(fileName);
+        return file.delete();
+      } else {
+        return true;
+      }
+    } catch (IOException e) {
+      e.printStackTrace();
+      return false;
+    }
+  }
+
+  /**
+   * verify whether the mdt file exists
+   * if the file exists, then return true; otherwise return false
+   *
+   * @return boolean
+   */
+  public static boolean verifyMdtFile() {
+    String fileName = CarbonProperties.getInstance().getSystemFolderLocation()
+            + CarbonCommonConstants.FILE_SEPARATOR + "datamap.mdtfile";
+    try {
+      if (FileFactory.isFileExist(fileName)) {
+        return true;
+      }
+      return false;
+    } catch (IOException e) {
+      throw new RuntimeException("IO exception:", e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
----------------------------------------------------------------------
diff --git a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java b/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
deleted file mode 100644
index 2d5dbcd..0000000
--- a/store/sdk/src/test/java/org/apache/carbondata/sdk/file/TestUtil.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.sdk.file;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.File;
-import java.io.FileFilter;
-import java.io.IOException;
-import java.io.InputStream;
-
-import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.path.CarbonTablePath;
-
-import org.apache.avro.file.DataFileWriter;
-import org.apache.avro.generic.GenericData;
-import org.apache.avro.generic.GenericDatumReader;
-import org.apache.avro.generic.GenericDatumWriter;
-import org.apache.avro.io.DecoderFactory;
-import org.apache.avro.io.Encoder;
-import org.apache.avro.io.JsonDecoder;
-import org.apache.hadoop.conf.Configuration;
-import org.junit.Assert;
-
-public class TestUtil {
-
-  public static Configuration configuration = new Configuration();
-
-  public static GenericData.Record jsonToAvro(String json, String avroSchema) throws IOException {
-    InputStream input = null;
-    DataFileWriter writer = null;
-    Encoder encoder = null;
-    ByteArrayOutputStream output = null;
-    try {
-      org.apache.avro.Schema schema = new org.apache.avro.Schema.Parser().parse(avroSchema);
-      GenericDatumReader reader = new GenericDatumReader (schema);
-      input = new ByteArrayInputStream(json.getBytes());
-      output = new ByteArrayOutputStream();
-      DataInputStream din = new DataInputStream(input);
-      writer = new DataFileWriter (new GenericDatumWriter ());
-      writer.create(schema, output);
-      JsonDecoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
-      GenericData.Record datum = null;
-      datum = (GenericData.Record) reader.read(null, decoder);
-      return datum;
-    } finally {
-      try {
-        input.close();
-        writer.close();
-      } catch (Exception e) {
-        e.printStackTrace();
-      }
-    }
-  }
-
-  static void writeFilesAndVerify(Schema schema, String path) {
-    writeFilesAndVerify(schema, path, null);
-  }
-
-  static void writeFilesAndVerify(Schema schema, String path, String[] sortColumns) {
-    writeFilesAndVerify(100, schema, path, sortColumns, false, -1, -1, true);
-  }
-
-  public static void writeFilesAndVerify(int rows, Schema schema, String path, boolean persistSchema) {
-    writeFilesAndVerify(rows, schema, path, null, persistSchema, -1, -1, true);
-  }
-
-  public static void writeFilesAndVerify(Schema schema, String path, boolean persistSchema,
-      boolean isTransactionalTable) {
-    writeFilesAndVerify(100, schema, path, null, persistSchema, -1, -1, isTransactionalTable);
-  }
-
-  /**
-   * write file and verify
-   *
-   * @param rows                 number of rows
-   * @param schema               schema
-   * @param path                 table store path
-   * @param persistSchema        whether persist schema
-   * @param isTransactionalTable whether is transactional table
-   */
-  public static void writeFilesAndVerify(int rows, Schema schema, String path, boolean persistSchema,
-    boolean isTransactionalTable) {
-    writeFilesAndVerify(rows, schema, path, null, persistSchema, -1, -1, isTransactionalTable);
-  }
-
-  /**
-   * Invoke CarbonWriter API to write carbon files and assert the file is rewritten
-   * @param rows number of rows to write
-   * @param schema schema of the file
-   * @param path local write path
-   * @param sortColumns sort columns
-   * @param persistSchema true if want to persist schema file
-   * @param blockletSize blockletSize in the file, -1 for default size
-   * @param blockSize blockSize in the file, -1 for default size
-   * @param isTransactionalTable set to true if this is written for Transactional Table.
-   */
-  public static void writeFilesAndVerify(int rows, Schema schema, String path, String[] sortColumns,
-      boolean persistSchema, int blockletSize, int blockSize, boolean isTransactionalTable) {
-    try {
-      CarbonWriterBuilder builder = CarbonWriter.builder()
-          .isTransactionalTable(isTransactionalTable)
-          .outputPath(path);
-      if (sortColumns != null) {
-        builder = builder.sortBy(sortColumns);
-      }
-      if (persistSchema) {
-        builder = builder.persistSchemaFile(true);
-      }
-      if (blockletSize != -1) {
-        builder = builder.withBlockletSize(blockletSize);
-      }
-      if (blockSize != -1) {
-        builder = builder.withBlockSize(blockSize);
-      }
-
-      CarbonWriter writer = builder.buildWriterForCSVInput(schema, configuration);
-
-      for (int i = 0; i < rows; i++) {
-        writer.write(new String[]{"robot" + (i % 10), String.valueOf(i), String.valueOf((double) i / 2)});
-      }
-      writer.close();
-    } catch (IOException e) {
-      e.printStackTrace();
-      Assert.fail(e.getMessage());
-    } catch (InvalidLoadOptionException l) {
-      l.printStackTrace();
-      Assert.fail(l.getMessage());
-    }
-
-    File segmentFolder = null;
-    if (isTransactionalTable) {
-      segmentFolder = new File(CarbonTablePath.getSegmentPath(path, "null"));
-      Assert.assertTrue(segmentFolder.exists());
-    } else {
-      segmentFolder = new File(path);
-      Assert.assertTrue(segmentFolder.exists());
-    }
-
-    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
-      @Override public boolean accept(File pathname) {
-        return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
-      }
-    });
-    Assert.assertNotNull(dataFiles);
-    Assert.assertTrue(dataFiles.length > 0);
-  }
-
-  /**
-   * verify whether the file exists
-   * if delete the file success or file not exists, then return true; otherwise return false
-   *
-   * @return boolean
-   */
-  public static boolean cleanMdtFile() {
-    String fileName = CarbonProperties.getInstance().getSystemFolderLocation()
-            + CarbonCommonConstants.FILE_SEPARATOR + "datamap.mdtfile";
-    try {
-      if (FileFactory.isFileExist(fileName)) {
-        File file = new File(fileName);
-        file.delete();
-        return true;
-      } else {
-        return true;
-      }
-    } catch (IOException e) {
-      e.printStackTrace();
-      return false;
-    }
-  }
-
-  /**
-   * verify whether the mdt file exists
-   * if the file exists, then return true; otherwise return false
-   *
-   * @return boolean
-   */
-  public static boolean verifyMdtFile() {
-    String fileName = CarbonProperties.getInstance().getSystemFolderLocation()
-            + CarbonCommonConstants.FILE_SEPARATOR + "datamap.mdtfile";
-    try {
-      if (FileFactory.isFileExist(fileName)) {
-        return true;
-      }
-      return false;
-    } catch (IOException e) {
-      throw new RuntimeException("IO exception:", e);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/pom.xml
----------------------------------------------------------------------
diff --git a/tools/cli/pom.xml b/tools/cli/pom.xml
new file mode 100644
index 0000000..0d00438
--- /dev/null
+++ b/tools/cli/pom.xml
@@ -0,0 +1,93 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.carbondata</groupId>
+    <artifactId>carbondata-parent</artifactId>
+    <version>1.5.0-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>carbondata-cli</artifactId>
+  <name>Apache CarbonData :: CLI</name>
+
+  <properties>
+    <dev.path>${basedir}/../../dev</dev.path>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-store-sdk</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.scalatest</groupId>
+      <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.8</source>
+          <target>1.8</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-shade-plugin</artifactId>
+        <version>3.1.1</version>
+        <configuration>
+          <shadedArtifactAttached>false</shadedArtifactAttached>
+          <promoteTransitiveDependencies>true</promoteTransitiveDependencies>
+          <outputFile>target/carbondata-cli.jar</outputFile>
+          <transformers>
+            <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+              <manifestEntries>
+                <Main-Class>org.apache.carbondata.tool.CarbonCli</Main-Class>
+              </manifestEntries>
+            </transformer>
+          </transformers>
+          <artifactSet>
+            <includes>
+              <include>*:*</include>
+            </includes>
+          </artifactSet>
+          <filters>
+            <filter>
+              <artifact>*:*</artifact>
+              <excludes>
+                <exclude>org/datanucleus/**</exclude>
+                <exclude>META-INF/*.SF</exclude>
+                <exclude>META-INF/*.DSA</exclude>
+                <exclude>META-INF/*.RSA</exclude>
+                <exclude>META-INF/vfs-providers.xml</exclude>
+                <exclude>io/netty/**</exclude>
+              </excludes>
+            </filter>
+          </filters>
+        </configuration>
+        <executions>
+          <execution>
+            <phase>package</phase>
+            <goals>
+              <goal>shade</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/src/main/java/org/apache/carbondata/tool/CarbonCli.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/CarbonCli.java b/tools/cli/src/main/java/org/apache/carbondata/tool/CarbonCli.java
new file mode 100644
index 0000000..effb139
--- /dev/null
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/CarbonCli.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.tool;
+
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.apache.carbondata.common.annotations.InterfaceAudience;
+import org.apache.carbondata.common.annotations.InterfaceStability;
+import org.apache.carbondata.core.memory.MemoryException;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.cli.PosixParser;
+
+/**
+ * CarbonCli tool, which can be run as a standalone java application.
+ */
+@InterfaceAudience.User
+@InterfaceStability.Unstable
+public class CarbonCli {
+
+  private static Options buildOptions() {
+    Option help = new Option("h", "help", false,"print this message");
+    Option path = OptionBuilder.withArgName("path")
+        .hasArg()
+        .withDescription("the path which contains carbondata files, nested folder is supported")
+        .withLongOpt("path")
+        .create("p");
+
+    Option command = OptionBuilder
+        .withArgName("command name")
+        .hasArg()
+        .withDescription("command to execute, supported commands are: summary")
+        .isRequired(true)
+        .create("cmd");
+
+    Option all = new Option("a", "all",false, "print all information");
+    Option schema = new Option("s", "schema",false, "print the schema");
+    Option segment = new Option("m", "showSegment", false, "print segment information");
+    Option tblProperties = new Option("t", "tblProperties", false, "print table properties");
+    Option detail = new Option("b", "blocklet", false, "print blocklet size detail");
+    Option columnName = OptionBuilder
+        .withArgName("column name")
+        .hasArg()
+        .withDescription("column to print statistics")
+        .withLongOpt("column")
+        .create("c");
+
+    Options options = new Options();
+    options.addOption(help);
+    options.addOption(path);
+    options.addOption(command);
+    options.addOption(all);
+    options.addOption(schema);
+    options.addOption(segment);
+    options.addOption(tblProperties);
+    options.addOption(detail);
+    options.addOption(columnName);
+    return options;
+  }
+
+  public static void main(String[] args) {
+    run(args, System.out);
+  }
+
+  static void run(String[] args, PrintStream out) {
+    Options options = buildOptions();
+    CommandLineParser parser = new PosixParser();
+    try {
+      CommandLine line = parser.parse(options, args);
+      if (line.hasOption("h")) {
+        printHelp(options);
+        return;
+      }
+
+      String cmd = line.getOptionValue("cmd");
+      if (cmd.equalsIgnoreCase("summary")) {
+        runSummaryCommand(line, options, out);
+      } else {
+        out.println("command " + cmd + " is not supported");
+        printHelp(options);
+        return;
+      }
+
+      out.flush();
+    } catch (ParseException exp) {
+      out.println("Parsing failed. Reason: " + exp.getMessage());
+    } catch (IOException | MemoryException e) {
+      out.println(e.getMessage());
+    }
+  }
+
+  private static void printHelp(Options options) {
+    HelpFormatter formatter = new HelpFormatter();
+    formatter.printHelp("CarbonCli", options);
+  }
+
+  private static void runSummaryCommand(CommandLine line, Options options, PrintStream out)
+      throws IOException, MemoryException {
+    String path = "";
+    if (line.hasOption("p")) {
+      path = line.getOptionValue("path");
+    } else {
+      System.err.println("path is required");
+      printHelp(options);
+      return;
+    }
+    DataSummary summary = new DataSummary(path, out);
+    if (summary.isEmpty()) {
+      System.out.println("no data file found");
+      return;
+    }
+    out.println("Input Folder: " + path);
+    summary.printBasic();
+    boolean printAll = false;
+    if (line.hasOption("a")) {
+      printAll = true;
+    }
+    if (line.hasOption("s") || printAll) {
+      summary.printSchema();
+    }
+    if (line.hasOption("m") || printAll) {
+      summary.printSegments();
+    }
+    if (line.hasOption("t") || printAll) {
+      summary.printTableProperties();
+    }
+    if (line.hasOption("b") || printAll) {
+      summary.printBlockletDetail();
+    }
+    if (line.hasOption("c")) {
+      String columnName = line.getOptionValue("c");
+      summary.printColumnStats(columnName);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/src/main/java/org/apache/carbondata/tool/DataFile.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/DataFile.java b/tools/cli/src/main/java/org/apache/carbondata/tool/DataFile.java
new file mode 100644
index 0000000..ea67829
--- /dev/null
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/DataFile.java
@@ -0,0 +1,432 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.tool;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.math.BigDecimal;
+import java.nio.ByteBuffer;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.apache.carbondata.core.datastore.FileReader;
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
+import org.apache.carbondata.core.datastore.compression.Compressor;
+import org.apache.carbondata.core.datastore.compression.CompressorFactory;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.reader.CarbonFooterReaderV3;
+import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.scan.result.vector.CarbonDictionary;
+import org.apache.carbondata.core.util.ByteUtil;
+import org.apache.carbondata.core.util.CarbonMetadataUtil;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.DataTypeUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.format.BlockletIndex;
+import org.apache.carbondata.format.BlockletInfo3;
+import org.apache.carbondata.format.DataChunk3;
+import org.apache.carbondata.format.FileFooter3;
+import org.apache.carbondata.format.FileHeader;
+import org.apache.carbondata.format.LocalDictionaryChunk;
+
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.FILE_SEPARATOR;
+
+/**
+ * Contains information extracted from a .carbondata file
+ */
+class DataFile {
+  // file full path
+  private String filePath;
+
+  // reader for this file
+  private FileReader fileReader;
+
+  // shard name
+  private String shardName;
+
+  // part id
+  private String partNo;
+
+  private long fileSizeInBytes;
+  private long footerSizeInBytes;
+
+  // size in bytes of each blocklet
+  private LinkedList<Long> blockletSizeInBytes = new LinkedList<>();
+
+  // data size in bytes of each column in each blocklet
+  private LinkedList<LinkedList<Long>> columnDataSizeInBytes = new LinkedList<>();
+  // meta size (DataChunk3) in bytes of each column in each blocklet
+  private LinkedList<LinkedList<Long>> columnMetaSizeInBytes = new LinkedList<>();
+
+  private FileHeader header;
+  private FileFooter3 footer;
+  private List<ColumnSchema> schema;
+  private List<Blocklet> blocklets;
+
+  DataFile(CarbonFile file) throws IOException {
+    this.fileSizeInBytes = file.getSize();
+
+    FileHeader header = null;
+    FileFooter3 footer = null;
+    try {
+      header = readHeader(file);
+    } catch (IOException e) {
+      throw new IOException("failed to read header in " + file.getPath(), e);
+    }
+    if (header.isSetSync_marker()) {
+      // if sync_marker is set, it is a streaming format file
+      throw new UnsupportedOperationException("streaming file is not supported");
+    }
+    try {
+      footer = readFooter(file);
+    } catch (IOException e) {
+      throw new IOException("failed to read footer in " + file.getPath(), e);
+    }
+
+    this.filePath = file.getPath();
+    this.header = header;
+    this.footer = footer;
+    String filePath = file.getPath();
+    // file name extracted from the full path
+    String fileName = filePath.substring(filePath.lastIndexOf(FILE_SEPARATOR));
+    this.shardName = CarbonTablePath.getShardName(fileName);
+    this.partNo = CarbonTablePath.DataFileUtil.getPartNo(fileName);
+
+    // calculate blocklet size and column size
+    // first calculate the header size, it equals the offset of first
+    // column chunk in first blocklet
+    long headerSizeInBytes = footer.blocklet_info_list3.get(0).column_data_chunks_offsets.get(0);
+    long previousOffset = headerSizeInBytes;
+    for (BlockletInfo3 blockletInfo3 : footer.blocklet_info_list3) {
+      // calculate blocklet size in bytes
+      long blockletOffset = blockletInfo3.column_data_chunks_offsets.get(0);
+      blockletSizeInBytes.add(blockletOffset - previousOffset);
+      previousOffset = blockletOffset;
+
+      // calculate column size in bytes for each column
+      LinkedList<Long> columnDataSize = new LinkedList<>();
+      LinkedList<Long> columnMetaSize = new LinkedList<>();
+      long previousChunkOffset = blockletInfo3.column_data_chunks_offsets.get(0);
+      for (int i = 0; i < schema.size(); i++) {
+        columnDataSize.add(blockletInfo3.column_data_chunks_offsets.get(i) - previousChunkOffset);
+        columnMetaSize.add(blockletInfo3.column_data_chunks_length.get(i).longValue());
+      }
+      // last column chunk data size
+      columnDataSize.add(fileSizeInBytes - footerSizeInBytes - previousChunkOffset);
+      columnDataSize.removeFirst();
+      this.columnDataSizeInBytes.add(columnDataSize);
+      this.columnMetaSizeInBytes.add(columnMetaSize);
+
+    }
+    // last blocklet size
+    blockletSizeInBytes.add(
+        fileSizeInBytes - footerSizeInBytes - headerSizeInBytes - previousOffset);
+    this.blockletSizeInBytes.removeFirst();
+
+    assert (blockletSizeInBytes.size() == getNumBlocklets());
+  }
+
+  private FileHeader readHeader(CarbonFile dataFile) throws IOException {
+    CarbonHeaderReader reader = new CarbonHeaderReader(dataFile.getPath());
+    this.schema = reader.readSchema();
+    return reader.readHeader();
+  }
+
+  private FileFooter3 readFooter(CarbonFile dataFile) throws IOException {
+    this.fileReader = FileFactory.getFileHolder(FileFactory.getFileType(dataFile.getPath()));
+    ByteBuffer buffer = fileReader.readByteBuffer(FileFactory.getUpdatedFilePath(
+        dataFile.getPath()), dataFile.getSize() - 8, 8);
+    long footerOffset = buffer.getLong();
+    this.footerSizeInBytes = this.fileSizeInBytes - footerOffset;
+    CarbonFooterReaderV3 footerReader =
+        new CarbonFooterReaderV3(dataFile.getAbsolutePath(), footerOffset);
+    return footerReader.readFooterVersion3();
+  }
+
+  String getFilePath() {
+    return filePath;
+  }
+
+  String getShardName() {
+    return shardName;
+  }
+
+  String getPartNo() {
+    return partNo;
+  }
+
+  FileHeader getHeader() {
+    return header;
+  }
+
+  FileFooter3 getFooter() {
+    return footer;
+  }
+
+  List<ColumnSchema> getSchema() {
+    return schema;
+  }
+
+  private int getNumBlocklets() {
+    return footer.blocklet_info_list3.size();
+  }
+
+  Long getBlockletSizeInBytes(int blockletId) {
+    if (blockletId < 0 || blockletId >= getNumBlocklets()) {
+      throw new IllegalArgumentException("invalid blockletId: " + blockletId);
+    }
+    return blockletSizeInBytes.get(blockletId);
+  }
+
+  Long getColumnDataSizeInBytes(int blockletId, int columnIndex) {
+    if (blockletId < 0 || blockletId >= getNumBlocklets()) {
+      throw new IllegalArgumentException("invalid blockletId: " + blockletId);
+    }
+    LinkedList<Long> columnSize = this.columnDataSizeInBytes.get(blockletId);
+    if (columnIndex >= columnSize.size()) {
+      throw new IllegalArgumentException("invalid columnIndex: " + columnIndex);
+    }
+    return columnSize.get(columnIndex);
+  }
+
+  Long getColumnMetaSizeInBytes(int blockletId, int columnIndex) {
+    if (blockletId < 0 || blockletId >= getNumBlocklets()) {
+      throw new IllegalArgumentException("invalid blockletId: " + blockletId);
+    }
+    LinkedList<Long> columnSize = this.columnMetaSizeInBytes.get(blockletId);
+    if (columnIndex >= columnSize.size()) {
+      throw new IllegalArgumentException("invalid columnIndex: " + columnIndex);
+    }
+    return columnSize.get(columnIndex);
+  }
+
+  void initAllBlockletStats(String columnName) throws IOException, MemoryException {
+    int columnIndex = -1;
+    ColumnSchema column = null;
+    for (int i = 0; i < schema.size(); i++) {
+      if (schema.get(i).getColumnName().equalsIgnoreCase(columnName)) {
+        columnIndex = i;
+        column = schema.get(i);
+      }
+    }
+    if (column == null) {
+      throw new IllegalArgumentException("column name " + columnName + " not exist");
+    }
+    List<Blocklet> blocklets = new LinkedList<>();
+    for (int blockletId = 0; blockletId < footer.blocklet_index_list.size(); blockletId++) {
+      blocklets.add(new Blocklet(this, blockletId, column, columnIndex, footer));
+    }
+    this.blocklets = blocklets;
+  }
+
+  List<Blocklet> getAllBlocklets() {
+    return blocklets;
+  }
+
+  // Column chunk in one blocklet
+  class ColumnChunk {
+
+    ColumnSchema column;
+
+    // true if local dictionary is used in this column chunk
+    boolean localDict;
+
+    // average length in bytes for all pages in this column chunk
+    long avgPageLengthInBytes;
+
+    // the size in bytes of local dictionary for this column chunk
+    long blocketletDictionarySize;
+
+    // the number of entry in local dictionary for this column chunk
+    long blockletDictionaryEntries;
+
+    // min/max stats of this column chunk
+    byte[] min, max;
+
+    // percentage of min/max compared to the min/max range collected across all blocklets
+    // they are set after calculation in DataSummary
+    double minPercentage, maxPercentage;
+
+    /**
+     * Constructor
+     * @param blockletInfo blocklet info which this column chunk belongs to
+     * @param index blocklet index which this column chunk belongs to
+     * @param column column schema of this column chunk
+     * @param columnIndex column index of this column chunk
+     */
+    ColumnChunk(BlockletInfo3 blockletInfo, BlockletIndex index, ColumnSchema column,
+        int columnIndex) throws IOException, MemoryException {
+      this.column = column;
+      min = index.min_max_index.min_values.get(columnIndex).array();
+      max = index.min_max_index.max_values.get(columnIndex).array();
+
+      // read the column chunk metadata: DataChunk3
+      ByteBuffer buffer = fileReader.readByteBuffer(
+          filePath, blockletInfo.column_data_chunks_offsets.get(columnIndex),
+          blockletInfo.column_data_chunks_length.get(columnIndex));
+      DataChunk3 dataChunk = CarbonUtil.readDataChunk3(new ByteArrayInputStream(buffer.array()));
+      this.localDict = dataChunk.isSetLocal_dictionary();
+      if (this.localDict) {
+        String compressorName = CarbonMetadataUtil.getCompressorNameFromChunkMeta(
+            dataChunk.data_chunk_list.get(0).chunk_meta);
+        LocalDictionaryChunk dictionaryChunk = dataChunk.local_dictionary;
+        Compressor comp = CompressorFactory.getInstance().getCompressor(compressorName);
+        CarbonDictionary dictionary = DimensionRawColumnChunk.getDictionary(dictionaryChunk, comp);
+        blockletDictionaryEntries = dictionary.getDictionaryActualSize();
+        blocketletDictionarySize = dataChunk.local_dictionary.dictionary_data.array().length;
+      }
+      long pageLength = 0;
+      for (int size : dataChunk.page_length) {
+        pageLength += size;
+      }
+      avgPageLengthInBytes = pageLength / dataChunk.page_length.size();
+    }
+
+    void setMinPercentage(double minPercentage) {
+      this.minPercentage = minPercentage;
+    }
+
+    void setMaxPercentage(double maxPercentage) {
+      this.maxPercentage = maxPercentage;
+    }
+
+    double getMinPercentage() {
+      return minPercentage;
+    }
+
+    double getMaxPercentage() {
+      return maxPercentage;
+    }
+
+    DataType getDataType() {
+      return column.getDataType();
+    }
+
+    byte[] min(byte[] minValue) {
+      if (minValue == null) {
+        return min;
+      } else {
+        return ByteUtil.UnsafeComparer.INSTANCE.compareTo(minValue, min) < 0 ? minValue : min;
+      }
+    }
+
+    byte[] max(byte[] maxValue) {
+      if (maxValue == null) {
+        return max;
+      } else {
+        return ByteUtil.UnsafeComparer.INSTANCE.compareTo(maxValue, max) > 0 ? maxValue : max;
+      }
+    }
+  }
+
+  class Blocklet {
+    DataFile file;
+    int id;
+    ColumnChunk columnChunk;
+
+    Blocklet(DataFile file, int blockletId, ColumnSchema column, int columnIndex,
+        FileFooter3 footer) throws IOException, MemoryException {
+      this.file = file;
+      this.id = blockletId;
+      BlockletIndex index = footer.blocklet_index_list.get(blockletId);
+      BlockletInfo3 info = footer.blocklet_info_list3.get(blockletId);
+      this.columnChunk = new ColumnChunk(info, index, column, columnIndex);
+    }
+
+    String getShardName() {
+      return file.getShardName();
+    }
+
+    ColumnChunk getColumnChunk() {
+      return columnChunk;
+    }
+
+    // compute and set min and max percentage for this blocklet
+    void computePercentage(byte[] shardMin, byte[] shardMax) {
+      double min = computePercentage(columnChunk.min, shardMin, shardMax, columnChunk.column);
+      double max = computePercentage(columnChunk.max, shardMin, shardMax, columnChunk.column);
+      columnChunk.setMinPercentage(min);
+      columnChunk.setMaxPercentage(max);
+    }
+
+    /**
+     * Calculate data percentage in [min, max] scope based on data type
+     * @param data data to calculate the percentage
+     * @param min min value
+     * @param max max value
+     * @param column column schema including data type
+     * @return result
+     */
+    private double computePercentage(byte[] data, byte[] min, byte[] max, ColumnSchema column) {
+      if (column.getDataType() == DataTypes.STRING) {
+        // for string, we do not calculate
+        return 0;
+      } else if (DataTypes.isDecimal(column.getDataType())) {
+        BigDecimal minValue = DataTypeUtil.byteToBigDecimal(min);
+        BigDecimal dataValue = DataTypeUtil.byteToBigDecimal(data).subtract(minValue);
+        BigDecimal factorValue = DataTypeUtil.byteToBigDecimal(max).subtract(minValue);
+        return dataValue.divide(factorValue).doubleValue();
+      }
+      double dataValue, minValue, factorValue;
+      if (column.getDataType() == DataTypes.SHORT) {
+        minValue = ByteUtil.toShort(min, 0);
+        dataValue = ByteUtil.toShort(data, 0) - minValue;
+        factorValue = ByteUtil.toShort(max, 0) - ByteUtil.toShort(min, 0);
+      } else if (column.getDataType() == DataTypes.INT) {
+        if (column.isSortColumn()) {
+          minValue = ByteUtil.toXorInt(min, 0, min.length);
+          dataValue = ByteUtil.toXorInt(data, 0, data.length) - minValue;
+          factorValue = ByteUtil.toXorInt(max, 0, max.length) - ByteUtil.toXorInt(min, 0, min.length);
+        } else {
+          minValue = ByteUtil.toLong(min, 0, min.length);
+          dataValue = ByteUtil.toLong(data, 0, data.length) - minValue;
+          factorValue = ByteUtil.toLong(max, 0, max.length) - ByteUtil.toLong(min, 0, min.length);
+        }
+      } else if (column.getDataType() == DataTypes.LONG) {
+        minValue = ByteUtil.toLong(min, 0, min.length);
+        dataValue = ByteUtil.toLong(data, 0, data.length) - minValue;
+        factorValue = ByteUtil.toLong(max, 0, max.length) - ByteUtil.toLong(min, 0, min.length);
+      } else if (column.getDataType() == DataTypes.DATE) {
+        minValue = ByteUtil.toInt(min, 0, min.length);
+        dataValue = ByteUtil.toInt(data, 0, data.length) - minValue;
+        factorValue = ByteUtil.toInt(max, 0, max.length) - ByteUtil.toInt(min, 0, min.length);
+      } else if (column.getDataType() == DataTypes.TIMESTAMP) {
+        minValue = ByteUtil.toLong(min, 0, min.length);
+        dataValue = ByteUtil.toLong(data, 0, data.length) - minValue;
+        factorValue = ByteUtil.toLong(max, 0, max.length) - ByteUtil.toLong(min, 0, min.length);
+      } else if (column.getDataType() == DataTypes.DOUBLE) {
+        minValue = ByteUtil.toDouble(min, 0, min.length);
+        dataValue = ByteUtil.toDouble(data, 0, data.length) - minValue;
+        factorValue = ByteUtil.toDouble(max, 0, max.length) - ByteUtil.toDouble(min, 0, min.length);
+      } else {
+        throw new UnsupportedOperationException("data type: " + column.getDataType());
+      }
+
+      if (factorValue == 0d) {
+        return 1;
+      }
+      return dataValue / factorValue;
+    }
+  }
+
+}
\ No newline at end of file
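
The Min%/Max% figures above normalize each blocklet's min/max against the
shard-level range. A minimal standalone sketch of the same normalization,
using plain long values instead of CarbonData's byte[] encodings (the class
and sample values are illustrative only):

    public class PercentageDemo {
      // relative position of data inside [min, max], mirroring computePercentage above
      static double percentage(long data, long min, long max) {
        double factor = (double) max - min;
        if (factor == 0d) {
          return 1;   // same convention as above: a single-valued range counts as 100%
        }
        return (data - min) / factor;
      }

      public static void main(String[] args) {
        // a blocklet whose min is 25, inside a shard whose range is [0, 100]
        System.out.println(percentage(25L, 0L, 100L) * 100);  // prints 25.0 (the Min% column)
      }
    }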

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/src/main/java/org/apache/carbondata/tool/DataSummary.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/DataSummary.java b/tools/cli/src/main/java/org/apache/carbondata/tool/DataSummary.java
new file mode 100644
index 0000000..7ca6951
--- /dev/null
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/DataSummary.java
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.tool;
+
+import java.io.IOException;
+import java.io.PrintStream;
+import java.nio.charset.Charset;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.carbondata.common.Strings;
+import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
+import org.apache.carbondata.core.reader.CarbonHeaderReader;
+import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
+import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+import org.apache.carbondata.format.BlockletInfo3;
+import org.apache.carbondata.format.FileFooter3;
+import org.apache.carbondata.format.FileHeader;
+import org.apache.carbondata.format.TableInfo;
+
+import static org.apache.carbondata.core.constants.CarbonCommonConstants.DEFAULT_CHARSET;
+
+/**
+ * Data Summary command implementation for {@link CarbonCli}
+ */
+class DataSummary {
+  private String dataFolder;
+  private PrintStream out;
+
+  private long numBlock;
+  private long numShard;
+  private long numBlocklet;
+  private long numPage;
+  private long numRow;
+  private long totalDataSize;
+
+  // file path mapping to file object
+  private LinkedHashMap<String, DataFile> dataFiles = new LinkedHashMap<>();
+  private CarbonFile tableStatusFile;
+  private CarbonFile schemaFile;
+
+  DataSummary(String dataFolder, PrintStream out) throws IOException {
+    this.dataFolder = dataFolder;
+    this.out = out;
+    collectDataFiles();
+  }
+
+  private boolean isColumnarFile(String fileName) {
+    // a columnar data file has a non-zero timestamp in its name;
+    // streaming files use timestamp "0"
+    return fileName.endsWith(CarbonTablePath.CARBON_DATA_EXT) &&
+        !CarbonTablePath.DataFileUtil.getTimeStampFromFileName(fileName).equals("0");
+  }
+
+  private boolean isStreamFile(String fileName) {
+    // if the timestamp in file name is "0", it is a streaming file
+    return fileName.endsWith(CarbonTablePath.CARBON_DATA_EXT) &&
+        CarbonTablePath.DataFileUtil.getTimeStampFromFileName(fileName).equals("0");
+  }
+
+  private void collectDataFiles() throws IOException {
+    Set<String> shards = new HashSet<>();
+    CarbonFile folder = FileFactory.getCarbonFile(dataFolder);
+    List<CarbonFile> files = folder.listFiles(true);
+    List<DataFile> unsortedFiles = new ArrayList<>();
+    for (CarbonFile file : files) {
+      if (isColumnarFile(file.getName())) {
+        DataFile dataFile = new DataFile(file);
+        unsortedFiles.add(dataFile);
+        collectNum(dataFile.getFooter());
+        shards.add(dataFile.getShardName());
+        totalDataSize += file.getSize();
+      } else if (file.getName().endsWith(CarbonTablePath.TABLE_STATUS_FILE)) {
+        tableStatusFile = file;
+      } else if (file.getName().startsWith(CarbonTablePath.SCHEMA_FILE)) {
+        schemaFile = file;
+      } else if (isStreamFile(file.getName())) {
+        out.println("WARN: input path contains streaming file, this tool does not support it yet, "
+            + "skipping it...");
+      }
+    }
+    unsortedFiles.sort((o1, o2) -> {
+      if (o1.getShardName().equalsIgnoreCase(o2.getShardName())) {
+        return Integer.parseInt(o1.getPartNo()) - Integer.parseInt(o2.getPartNo());
+      } else {
+        return o1.getShardName().compareTo(o2.getShardName());
+      }
+    });
+    for (DataFile collectedFile : unsortedFiles) {
+      this.dataFiles.put(collectedFile.getFilePath(), collectedFile);
+    }
+    numShard = shards.size();
+  }
+
+  private void collectNum(FileFooter3 footer) {
+    numBlock++;
+    numBlocklet += footer.blocklet_index_list.size();
+    numRow += footer.num_rows;
+    for (BlockletInfo3 blockletInfo3 : footer.blocklet_info_list3) {
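+      // "number_number_of_pages" (with the doubled word) is the field name as
+      // defined in the carbondata Thrift format, not a typo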
+      numPage += blockletInfo3.number_number_of_pages;
+    }
+  }
+
+  void printBasic() {
+    out.println("## Summary");
+    out.println(
+        String.format("total: %,d blocks, %,d shards, %,d blocklets, %,d pages, %,d rows, %s",
+            numBlock, numShard, numBlocklet, numPage, numRow, Strings.formatSize(totalDataSize)));
+    out.println(
+        String.format("avg: %s/block, %s/blocklet, %,d rows/block, %,d rows/blocklet",
+            Strings.formatSize(totalDataSize / numBlock),
+            Strings.formatSize(totalDataSize / numBlocklet),
+            numRow / numBlock,
+            numRow / numBlocklet));
+  }
+
+  void printSchema() throws IOException {
+    if (dataFiles.size() > 0) {
+      String firstFile = dataFiles.keySet().iterator().next();
+      CarbonFile file = FileFactory.getCarbonFile(firstFile);
+      out.println();
+      out.println("## Schema");
+      out.println(String.format("schema in %s", file.getName()));
+      CarbonHeaderReader reader = new CarbonHeaderReader(file.getPath());
+      FileHeader header = reader.readHeader();
+      out.println("version: V" + header.version);
+      out.println("timestamp: " + new java.sql.Timestamp(header.time_stamp));
+      List<ColumnSchema> columns = reader.readSchema();
+      TablePrinter printer = new TablePrinter(
+          new String[]{"Column Name", "Data Type", "Column Type",
+              "SortColumn", "Encoding", "Ordinal", "Id"});
+      for (ColumnSchema column : columns) {
+        String shortColumnId = "NA";
+        if (column.getColumnUniqueId() != null && column.getColumnUniqueId().length() > 4) {
+          shortColumnId = "*" +
+              column.getColumnUniqueId().substring(column.getColumnUniqueId().length() - 4);
+        }
+        printer.addRow(new String[]{
+            column.getColumnName(),
+            column.getDataType().getName(),
+            column.isDimensionColumn() ? "dimension" : "measure",
+            String.valueOf(column.isSortColumn()),
+            column.getEncodingList().toString(),
+            Integer.toString(column.getSchemaOrdinal()),
+            shortColumnId
+        });
+      }
+      printer.printFormatted(out);
+    }
+  }
+
+  void printSegments() throws IOException {
+    out.println();
+    out.println("## Segment");
+    if (tableStatusFile != null) {
+      // first collect all information in memory then print a formatted table
+      LoadMetadataDetails[] segments =
+          SegmentStatusManager.readTableStatusFile(tableStatusFile.getPath());
+      TablePrinter printer = new TablePrinter(
+          new String[]{"SegmentID", "Status", "Load Start", "Load End",
+              "Merged To", "Format", "Data Size", "Index Size"});
+      for (LoadMetadataDetails segment : segments) {
+        String dataSize, indexSize;
+        if (segment.getDataSize() == null) {
+          dataSize = "NA";
+        } else {
+          dataSize = Strings.formatSize(Long.parseLong(segment.getDataSize()));
+        }
+        if (segment.getIndexSize() == null) {
+          indexSize = "NA";
+        } else {
+          indexSize = Strings.formatSize(Long.parseLong(segment.getIndexSize()));
+        }
+        printer.addRow(new String[]{
+            segment.getLoadName(),
+            segment.getSegmentStatus().toString(),
+            new java.sql.Date(segment.getLoadStartTime()).toString(),
+            new java.sql.Date(segment.getLoadEndTime()).toString(),
+            segment.getMergedLoadName() == null ? "NA" : segment.getMergedLoadName(),
+            segment.getFileFormat().toString(),
+            dataSize,
+            indexSize}
+        );
+      }
+      printer.printFormatted(out);
+    } else {
+      out.println("table status file not found");
+    }
+  }
+
+  void printTableProperties() throws IOException {
+    out.println();
+    out.println("## Table Properties");
+    if (schemaFile != null) {
+      TableInfo thriftTableInfo = CarbonUtil.readSchemaFile(schemaFile.getPath());
+      Map<String, String> tblProperties = thriftTableInfo.fact_table.tableProperties;
+      TablePrinter printer = new TablePrinter(
+          new String[]{"Property Name", "Property Value"});
+      for (Map.Entry<String, String> entry : tblProperties.entrySet()) {
+        printer.addRow(new String[] {
+            String.format("'%s'", entry.getKey()),
+            String.format("'%s'", entry.getValue())
+        });
+      }
+      printer.printFormatted(out);
+    } else {
+      out.println("schema file not found");
+    }
+  }
+
+  void printBlockletDetail() {
+    out.println();
+    out.println("## Block Detail");
+
+    ShardPrinter printer = new ShardPrinter(new String[]{
+        "BLK", "BLKLT", "NumPages", "NumRows", "Size"
+    });
+
+    for (Map.Entry<String, DataFile> entry : dataFiles.entrySet()) {
+      DataFile file = entry.getValue();
+      FileFooter3 footer = file.getFooter();
+      for (int blockletId = 0; blockletId < footer.blocklet_info_list3.size(); blockletId++) {
+        BlockletInfo3 blocklet = footer.blocklet_info_list3.get(blockletId);
+        printer.addRow(file.getShardName(), new String[]{
+            file.getPartNo(),
+            String.valueOf(blockletId),
+            String.format("%,d", blocklet.number_number_of_pages),
+            String.format("%,d", blocklet.num_rows),
+            Strings.formatSize(file.getBlockletSizeInBytes(blockletId))
+        });
+      }
+    }
+    printer.printFormatted(out);
+  }
+
+  private int getColumnIndex(String columnName) {
+    if (dataFiles.size() > 0) {
+      List<ColumnSchema> columns = dataFiles.entrySet().iterator().next().getValue().getSchema();
+      for (int i = 0; i < columns.size(); i++) {
+        if (columns.get(i).getColumnName().equalsIgnoreCase(columnName)) {
+          return i;
+        }
+      }
+    }
+    throw new RuntimeException("schema for column " + columnName + " not found");
+  }
+
+  void printColumnStats(String columnName) throws IOException, MemoryException {
+    out.println();
+    out.println("## Column Statistics for '" + columnName + "'");
+    for (DataFile dataFile : dataFiles.values()) {
+      dataFile.initAllBlockletStats(columnName);
+    }
+    collectAllBlockletStats(dataFiles.values());
+
+    int columnIndex = getColumnIndex(columnName);
+    String[] header = new String[]{"BLK", "BLKLT", "Meta Size", "Data Size",
+        "LocalDict", "DictEntries", "DictSize", "AvgPageSize", "Min%", "Max%"};
+
+    ShardPrinter printer = new ShardPrinter(header);
+    for (Map.Entry<String, DataFile> entry : dataFiles.entrySet()) {
+      DataFile file = entry.getValue();
+      for (DataFile.Blocklet blocklet : file.getAllBlocklets()) {
+        String min, max;
+        if (blocklet.getColumnChunk().getDataType() == DataTypes.STRING) {
+          min = new String(blocklet.getColumnChunk().min, Charset.forName(DEFAULT_CHARSET));
+          max = new String(blocklet.getColumnChunk().max, Charset.forName(DEFAULT_CHARSET));
+        } else {
+          min = String.format("%.1f", blocklet.getColumnChunk().getMinPercentage() * 100);
+          max = String.format("%.1f", blocklet.getColumnChunk().getMaxPercentage() * 100);
+        }
+        printer.addRow(
+            blocklet.getShardName(),
+            new String[]{
+                file.getPartNo(),
+                String.valueOf(blocklet.id),
+                Strings.formatSize(file.getColumnMetaSizeInBytes(blocklet.id, columnIndex)),
+                Strings.formatSize(file.getColumnDataSizeInBytes(blocklet.id, columnIndex)),
+                String.valueOf(blocklet.getColumnChunk().localDict),
+                String.valueOf(blocklet.getColumnChunk().blockletDictionaryEntries),
+                Strings.formatSize(blocklet.getColumnChunk().blocketletDictionarySize),
+                Strings.formatSize(blocklet.getColumnChunk().avgPageLengthInBytes),
+                min,
+                max}
+        );
+      }
+    }
+    printer.printFormatted(out);
+  }
+
+  private void collectAllBlockletStats(Collection<DataFile> dataFiles) {
+    // shard name mapping to blocklets belong to same shard
+    Map<String, List<DataFile.Blocklet>> shards = new HashMap<>();
+
+    // collect blocklets based on shard name
+    for (DataFile dataFile : dataFiles) {
+      List<DataFile.Blocklet> blocklets = dataFile.getAllBlocklets();
+      List<DataFile.Blocklet> existing = shards.get(dataFile.getShardName());
+      if (existing == null) {
+        existing = new LinkedList<>();
+      }
+      existing.addAll(blocklets);
+      shards.put(dataFile.getShardName(), existing);
+    }
+
+    // calculate min/max for each shard
+    Map<String, byte[]> shardMinMap = new HashMap<>();
+    Map<String, byte[]> shardMaxMap = new HashMap<>();
+    for (Map.Entry<String, List<DataFile.Blocklet>> shard : shards.entrySet()) {
+      byte[] shardMin = null;
+      byte[] shardMax = null;
+      for (DataFile.Blocklet blocklet : shard.getValue()) {
+        shardMin = blocklet.getColumnChunk().min(shardMin);
+        shardMax = blocklet.getColumnChunk().max(shardMax);
+      }
+      shardMinMap.put(shard.getKey(), shardMin);
+      shardMaxMap.put(shard.getKey(), shardMax);
+    }
+
+    // calculate min/max percentage for each blocklet
+    for (Map.Entry<String, List<DataFile.Blocklet>> shard : shards.entrySet()) {
+      byte[] shardMin = shardMinMap.get(shard.getKey());
+      byte[] shardMax = shardMaxMap.get(shard.getKey());
+      for (DataFile.Blocklet blocklet : shard.getValue()) {
+        blocklet.computePercentage(shardMin, shardMax);
+      }
+    }
+  }
+
+  public boolean isEmpty() {
+    return dataFiles.isEmpty();
+  }
+}
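
DataSummary is package-private and normally driven by CarbonCli, but it can be
exercised directly from a class in the same package. A minimal sketch (the
driver class name and folder path are assumptions for illustration):

    package org.apache.carbondata.tool;

    public class SummaryDemo {
      public static void main(String[] args) throws Exception {
        // point at a folder containing carbondata files, e.g. a table path
        DataSummary summary = new DataSummary("./CarbonCliTest", System.out);
        if (!summary.isEmpty()) {
          summary.printBasic();      // block/blocklet/page/row counts
          summary.printSchema();     // column layout read from the first data file
          summary.printSegments();   // prints "table status file not found" if absent
        }
      }
    }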

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/src/main/java/org/apache/carbondata/tool/ShardPrinter.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/ShardPrinter.java b/tools/cli/src/main/java/org/apache/carbondata/tool/ShardPrinter.java
new file mode 100644
index 0000000..05b6feb
--- /dev/null
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/ShardPrinter.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.tool;
+
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.Map;
+
+class ShardPrinter {
+  private Map<String, TablePrinter> shardPrinter = new HashMap<>();
+  private String[] header;
+
+  ShardPrinter(String[] header) {
+    this.header = header;
+  }
+
+  void addRow(String shardName, String[] row) {
+    TablePrinter printer = shardPrinter.get(shardName);
+    if (printer == null) {
+      printer = new TablePrinter(header);
+      shardPrinter.put(shardName, printer);
+    }
+    printer.addRow(row);
+  }
+
+  void printFormatted(PrintStream out) {
+    int shardId = 1;
+    for (Map.Entry<String, TablePrinter> entry : shardPrinter.entrySet()) {
+      out.println(String.format("Shard #%d (%s)", shardId++, entry.getKey()));
+      entry.getValue().printFormatted(out);
+      out.println();
+    }
+  }
+}
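
ShardPrinter groups rows by shard name and prints one formatted table per
shard. A small usage sketch from code in the same package (shard names and
values are made up):

    package org.apache.carbondata.tool;

    public class ShardPrinterDemo {
      public static void main(String[] args) {
        ShardPrinter printer = new ShardPrinter(new String[]{"BLK", "BLKLT", "NumRows"});
        printer.addRow("shard_1", new String[]{"0", "0", "928,000"});
        printer.addRow("shard_1", new String[]{"0", "1", "928,000"});
        printer.addRow("shard_2", new String[]{"1", "0", "704,000"});
        // prints each shard under a "Shard #N (<name>)" heading,
        // one aligned table per shard
        printer.printFormatted(System.out);
      }
    }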

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/src/main/java/org/apache/carbondata/tool/TablePrinter.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/main/java/org/apache/carbondata/tool/TablePrinter.java b/tools/cli/src/main/java/org/apache/carbondata/tool/TablePrinter.java
new file mode 100644
index 0000000..2e02d2f
--- /dev/null
+++ b/tools/cli/src/main/java/org/apache/carbondata/tool/TablePrinter.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.tool;
+
+import java.io.PrintStream;
+import java.util.LinkedList;
+import java.util.List;
+
+class TablePrinter {
+  private List<String[]> table = new LinkedList<>();
+
+  /**
+   * create a new Table Printer
+   * @param header table header
+   */
+  TablePrinter(String[] header) {
+    this.table.add(header);
+  }
+
+  void addRow(String[] row) {
+    table.add(row);
+  }
+
+  void printFormatted(PrintStream out) {
+    // calculate the max length of each output field in the table
+    int padding = 2;
+    int[] maxLength = new int[table.get(0).length];
+    for (int i = 0; i < table.get(0).length; i++) {
+      for (String[] row : table) {
+        maxLength[i] = Math.max(maxLength[i], row[i].length());
+      }
+    }
+
+    for (String[] row : table) {
+      for (int i = 0; i < row.length; i++) {
+        out.print(row[i]);
+        for (int num = 0; num < maxLength[i] + padding - row[i].length(); num++) {
+          out.print(" ");
+        }
+      }
+      out.println();
+    }
+  }
+}
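
TablePrinter computes the widest cell per column and pads every cell to that
width plus two spaces. A small usage sketch from code in the same package
(rows are illustrative):

    package org.apache.carbondata.tool;

    public class TablePrinterDemo {
      public static void main(String[] args) {
        TablePrinter printer = new TablePrinter(new String[]{"Column Name", "Data Type"});
        printer.addRow(new String[]{"name", "STRING"});
        printer.addRow(new String[]{"age", "INT"});
        // each cell is left-aligned to the widest cell in its column plus two spaces
        printer.printFormatted(System.out);
      }
    }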

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c40d8547/tools/cli/src/test/java/org/apache/carbondata/tool/CarbonCliTest.java
----------------------------------------------------------------------
diff --git a/tools/cli/src/test/java/org/apache/carbondata/tool/CarbonCliTest.java b/tools/cli/src/test/java/org/apache/carbondata/tool/CarbonCliTest.java
new file mode 100644
index 0000000..0d0d6b5
--- /dev/null
+++ b/tools/cli/src/test/java/org/apache/carbondata/tool/CarbonCliTest.java
@@ -0,0 +1,199 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.tool;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintStream;
+
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.sdk.file.Field;
+import org.apache.carbondata.sdk.file.Schema;
+import org.apache.carbondata.sdk.file.TestUtil;
+
+import org.apache.commons.io.FileUtils;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class CarbonCliTest {
+
+  private String path = "./CarbonCliTest";
+
+  @Before
+  public void before() throws IOException {
+    FileUtils.deleteDirectory(new File(path));
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    TestUtil.writeFilesAndVerify(5000000, new Schema(fields), path, new String[]{"age"},
+        true, 3, 8, true);
+    TestUtil.writeFilesAndVerify(5000000, new Schema(fields), path, new String[]{"age"},
+        true, 3, 8, true);
+  }
+
+  @Test
+  public void testInvalidCmd() {
+    String[] args = {"-cmd", "DD", "-p", path};
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    PrintStream stream = new PrintStream(out);
+    CarbonCli.run(args, stream);
+    String output = new String(out.toByteArray());
+    Assert.assertTrue(output.contains("command DD is not supported"));
+
+    String[] args2 = {"-p", path};
+    out = new ByteArrayOutputStream();
+    stream = new PrintStream(out);
+    CarbonCli.run(args2, stream);
+    output = new String(out.toByteArray());
+    Assert.assertTrue(output.contains("Parsing failed. Reason: Missing required option: cmd"));
+  }
+
+  @Test
+  public void testOutputIndividual() {
+    String[] args = {"-cmd", "summary", "-p", path};
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    PrintStream stream = new PrintStream(out);
+    CarbonCli.run(args, stream);
+    String output = new String(out.toByteArray());
+    Assert.assertTrue(
+        output.contains(
+            "Input Folder: ./CarbonCliTest\n"
+          + "## Summary\n"
+          + "total: 6 blocks, 2 shards, 12 blocklets, 314 pages, 10,000,000 rows, 30.72MB\n"
+          + "avg: 5.12MB/block, 2.56MB/blocklet, 1,666,666 rows/block, 833,333 rows/blocklet"));
+
+    String[] args2 = {"-cmd", "summary", "-p", path, "-s"};
+    out = new ByteArrayOutputStream();
+    stream = new PrintStream(out);
+    CarbonCli.run(args2, stream);
+    output = new String(out.toByteArray());
+
+    Assert.assertTrue(
+        output.contains(
+            "Column Name  Data Type  Column Type  SortColumn  Encoding          Ordinal  Id  \n"
+          + "age          INT        dimension    true        [INVERTED_INDEX]  1        NA  \n"
+          + "name         STRING     dimension    false       [INVERTED_INDEX]  0        NA  \n"));
+
+    String[] args3 = {"-cmd", "summary", "-p", path, "-t"};
+    out = new ByteArrayOutputStream();
+    stream = new PrintStream(out);
+    CarbonCli.run(args3, stream);
+    output = new String(out.toByteArray());
+
+    Assert.assertTrue(
+        output.contains(
+            "## Table Properties\n"
+          + "Property Name              Property Value  \n"
+          + "'table_blocksize'          '8'             \n"
+          + "'table_blocklet_size'      '3'             \n"
+          + "'local_dictionary_enable'  'false'    "));
+
+    String[] args4 = {"-cmd", "summary", "-p", path, "-b"};
+    out = new ByteArrayOutputStream();
+    stream = new PrintStream(out);
+    CarbonCli.run(args4, stream);
+    output = new String(out.toByteArray());
+
+    Assert.assertTrue(
+        output.contains(
+            "BLK  BLKLT  NumPages  NumRows  Size    \n"
+          + "0    0      29        928,000  2.60MB  \n"
+          + "0    1      29        928,000  2.60MB  \n"
+          + "1    0      29        928,000  2.60MB  \n"
+          + "1    1      29        928,000  2.60MB  \n"
+          + "2    0      22        704,000  2.54MB  \n"
+          + "2    1      19        584,000  2.43MB  "));
+
+    String[] args5 = {"-cmd", "summary", "-p", path, "-c", "name"};
+    out = new ByteArrayOutputStream();
+    stream = new PrintStream(out);
+    CarbonCli.run(args5, stream);
+    output = new String(out.toByteArray());
+
+    Assert.assertTrue(
+        output.contains(
+            "BLK  BLKLT  Meta Size  Data Size  LocalDict  DictEntries  DictSize  AvgPageSize  Min%    Max%    \n"
+          + "0    0      1.82KB     5.19MB     false      0            0.0B      11.96KB      robot0  robot9  \n"
+          + "0    1      1.82KB     2.60MB     false      0            0.0B      11.96KB      robot0  robot9  \n"
+          + "1    0      1.82KB     5.19MB     false      0            0.0B      11.96KB      robot0  robot9  \n"
+          + "1    1      1.82KB     2.60MB     false      0            0.0B      11.96KB      robot0  robot9  \n"
+          + "2    0      1.38KB     4.97MB     false      0            0.0B      11.92KB      robot0  robot9  \n"
+          + "2    1      1.19KB     2.43MB     false      0            0.0B      11.42KB      robot0  robot9  \n"));
+  }
+
+  @Test
+  public void testOutputAll() {
+    String[] args = {"-cmd", "summary", "-p", path, "-a", "-c", "age"};
+
+    ByteArrayOutputStream out = new ByteArrayOutputStream();
+    PrintStream stream = new PrintStream(out);
+    CarbonCli.run(args, stream);
+    String output = new String(out.toByteArray());
+    Assert.assertTrue(
+        output.contains(
+            "Input Folder: ./CarbonCliTest\n"
+          + "## Summary\n"
+          + "total: 6 blocks, 2 shards, 12 blocklets, 314 pages, 10,000,000 rows, 30.72MB\n"
+          + "avg: 5.12MB/block, 2.56MB/blocklet, 1,666,666 rows/block, 833,333 rows/blocklet"));
+
+    Assert.assertTrue(
+        output.contains(
+            "Column Name  Data Type  Column Type  SortColumn  Encoding          Ordinal  Id  \n"
+          + "age          INT        dimension    true        [INVERTED_INDEX]  1        NA  \n"
+          + "name         STRING     dimension    false       [INVERTED_INDEX]  0        NA  \n"));
+
+    Assert.assertTrue(
+        output.contains(
+            "## Table Properties\n"
+          + "Property Name              Property Value  \n"
+          + "'table_blocksize'          '8'             \n"
+          + "'table_blocklet_size'      '3'             \n"
+          + "'local_dictionary_enable'  'false'    "));
+
+    Assert.assertTrue(
+        output.contains(
+            "BLK  BLKLT  NumPages  NumRows  Size    \n"
+          + "0    0      29        928,000  2.60MB  \n"
+          + "0    1      29        928,000  2.60MB  \n"
+          + "1    0      29        928,000  2.60MB  \n"
+          + "1    1      29        928,000  2.60MB  \n"
+          + "2    0      22        704,000  2.54MB  \n"
+          + "2    1      19        584,000  2.43MB  "));
+
+    Assert.assertTrue(
+        output.contains(
+          "BLK  BLKLT  Meta Size  Data Size  LocalDict  DictEntries  DictSize  AvgPageSize  Min%  Max%   \n"
+        + "0    0      1.81KB     2.26MB     false      0            0.0B      79.61KB      0.0   15.5   \n"
+        + "0    1      1.81KB     2.26MB     false      0            0.0B      79.60KB      15.5  30.9   \n"
+        + "1    0      1.81KB     2.26MB     false      0            0.0B      79.62KB      30.9  46.4   \n"
+        + "1    1      1.81KB     2.26MB     false      0            0.0B      79.60KB      46.4  61.9   \n"
+        + "2    0      1.37KB     2.28MB     false      0            0.0B      106.11KB     61.9  80.5   \n"
+        + "2    1      1.19KB     2.22MB     false      0            0.0B      119.55KB     80.5  100.0  "));
+  }
+
+  @After
+  public void after() throws IOException {
+    FileUtils.deleteDirectory(new File(path));
+  }
+
+}
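
As the tests above demonstrate, CarbonCli can also be driven programmatically
by handing it the argument array and a PrintStream. A minimal sketch (the table
path is an assumption for illustration):

    String[] args = {"-cmd", "summary", "-p", "/path/to/tableFolder", "-a", "-c", "age"};
    CarbonCli.run(args, System.out);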