Posted to commits@carbondata.apache.org by xu...@apache.org on 2019/01/04 01:28:57 UTC

carbondata git commit: [HOTFIX] Optimize the code style in csdk/sdk markdown doc

Repository: carbondata
Updated Branches:
  refs/heads/master deb08c329 -> 81bf02f54


[HOTFIX] Optimize the code style in csdk/sdk markdown doc

Optimize both csdk-guide & sdk-guide doc code style

This closes #3030


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/81bf02f5
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/81bf02f5
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/81bf02f5

Branch: refs/heads/master
Commit: 81bf02f541d928224a817cf3f0f4a90223b5d997
Parents: deb08c3
Author: lamber-ken <22...@qq.com>
Authored: Thu Dec 27 21:52:21 2018 +0800
Committer: xubo245 <xu...@huawei.com>
Committed: Fri Jan 4 09:28:41 2019 +0800

----------------------------------------------------------------------
 docs/csdk-guide.md |  713 ++++++++++++++++-----------------
 docs/sdk-guide.md  | 1008 ++++++++++++++++++++++++-----------------------
 2 files changed, 868 insertions(+), 853 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/81bf02f5/docs/csdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/csdk-guide.md b/docs/csdk-guide.md
index 75b6364..ef3f252 100644
--- a/docs/csdk-guide.md
+++ b/docs/csdk-guide.md
@@ -43,114 +43,116 @@ C++ SDK support read batch row. User can set batch by using withBatch(int batch)
 ## API List
 ### CarbonReader
 ```
-    /**
-     * create a CarbonReaderBuilder object for building carbonReader,
-     * CarbonReaderBuilder object  can configure different parameter
-     *
-     * @param env JNIEnv
-     * @param path data store path
-     * @param tableName table name
-     * @return CarbonReaderBuilder object
-     */
-    jobject builder(JNIEnv *env, char *path, char *tableName);
+/**
+ * Create a CarbonReaderBuilder object for building carbonReader;
+ * the CarbonReaderBuilder object can configure different parameters
+ *
+ * @param env JNIEnv
+ * @param path data store path
+ * @param tableName table name
+ * @return CarbonReaderBuilder object
+ */
+jobject builder(JNIEnv *env, char *path, char *tableName);
 ```
 
 ```
-    /**
-     * create a CarbonReaderBuilder object for building carbonReader,
-     * CarbonReaderBuilder object  can configure different parameter
-     *
-     * @param env JNIEnv
-     * @param path data store path
-     * */
-    void builder(JNIEnv *env, char *path);
+/**
+ * Create a CarbonReaderBuilder object for building carbonReader;
+ * the CarbonReaderBuilder object can configure different parameters
+ *
+ * @param env JNIEnv
+ * @param path data store path
+ * 
+ */
+void builder(JNIEnv *env, char *path);
 ```
 
 ```
-    /**
-     * Configure the projection column names of carbon reader
-     *
-     * @param argc argument counter
-     * @param argv argument vector
-     * @return CarbonReaderBuilder object
-     */
-    jobject projection(int argc, char *argv[]);
+/**
+ * Configure the projection column names of carbon reader
+ *
+ * @param argc argument counter
+ * @param argv argument vector
+ * @return CarbonReaderBuilder object
+ */
+jobject projection(int argc, char *argv[]);
 ```
 
 ```
-    /**
-     *  build carbon reader with argument vector
-     *  it support multiple parameter
-     *  like: key=value
-     *  for example: fs.s3a.access.key=XXXX, XXXX is user's access key value
-     *
-     * @param argc argument counter
-     * @param argv argument vector
-     * @return CarbonReaderBuilder object
-     **/
-    jobject withHadoopConf(int argc, char *argv[]);
+/**
+ * Build carbon reader with an argument vector;
+ * it supports multiple parameters
+ * in the form key=value,
+ * for example: fs.s3a.access.key=XXXX, where XXXX is the user's access key value
+ *
+ * @param argc argument counter
+ * @param argv argument vector
+ * @return CarbonReaderBuilder object
+ *
+ */
+jobject withHadoopConf(int argc, char *argv[]);
 ```
 
 ```
-   /**
-     * Sets the batch size of records to read
-     *
-     * @param batch batch size
-     * @return CarbonReaderBuilder object
-     */
-    void withBatch(int batch);
+/**
+ * Sets the batch size of records to read
+ *
+ * @param batch batch size
+ * @return CarbonReaderBuilder object
+ */
+void withBatch(int batch);
 ```
 
 ```
-    /**
-     * Configure Row Record Reader for reading.
-     */
-    void withRowRecordReader();
+/**
+ * Configure Row Record Reader for reading.
+ */
+void withRowRecordReader();
 ```
 
 ```
-    /**
-     * build carbonReader object for reading data
-     * it support read data from load disk
-     *
-     * @return carbonReader object
-     */
-    jobject build();
+/**
+ * Build carbonReader object for reading data;
+ * it supports reading data from local disk
+ *
+ * @return carbonReader object
+ */
+jobject build();
 ```
 
 ```
-    /**
-     * Whether it has next row data
-     *
-     * @return boolean value, if it has next row, return true. if it hasn't next row, return false.
-     */
-    jboolean hasNext();
+/**
+ * Whether it has next row data
+ *
+ * @return boolean value: true if there is a next row, false otherwise
+ */
+jboolean hasNext();
 ```
 
 ```
-    /**
-     * read next carbonRow from data
-     * @return carbonRow object of one row
-     */
-     jobject readNextRow();
+/**
+ * Read next carbonRow from data
+ * @return carbonRow object of one row
+ */
+jobject readNextRow();
 ```
 
 ```
-    /**
-     * read Next Batch Row
-     *
-     * @return rows
-     */
-    jobjectArray readNextBatchRow();
+/**
+ * Read the next batch of rows
+ *
+ * @return rows
+ */
+jobjectArray readNextBatchRow();
 ```
 
 ```
-    /**
-     * close the carbon reader
-     *
-     * @return  boolean value
-     */
-    jboolean close();
+/**
+ * Close the carbon reader
+ *
+ * @return  boolean value
+ */
+jboolean close();
 ```
 
 # C++ SDK Writer
@@ -172,361 +174,360 @@ release the memory and destroy JVM.
 ## API List
 ### CarbonWriter
 ```
-    /**
-     * create a CarbonWriterBuilder object for building carbonWriter,
-     * CarbonWriterBuilder object  can configure different parameter
-     *
-     * @param env JNIEnv
-     * @return CarbonWriterBuilder object
-     */
-    void builder(JNIEnv *env);
-```
-
-```
-    /**
-     * Sets the output path of the writer builder
-     *
-     * @param path is the absolute path where output files are written
-     * This method must be called when building CarbonWriterBuilder
-     * @return updated CarbonWriterBuilder
-     */
-    void outputPath(char *path);
-```
-
-```
-    /**
-      * sets the list of columns that needs to be in sorted order
-      *
-      * @param argc argc argument counter, the number of projection column
-      * @param argv argv is a string array of columns that needs to be sorted.
-      *                  If it is null or by default all dimensions are selected for sorting
-      *                  If it is empty array, no columns are sorted
-      */
-    void sortBy(int argc, char *argv[]);
-```
-
-```
-    /**
-     * configure the schema with json style schema
-     *
-     * @param jsonSchema json style schema
-     * @return updated CarbonWriterBuilder
-     */
-    void withCsvInput(char *jsonSchema);
-```
-
-```
-    /**
-    * Updates the hadoop configuration with the given key value
-    *
-    * @param key key word
-    * @param value value
-    * @return CarbonWriterBuilder object
-    */
-    void withHadoopConf(char *key, char *value);
-```
-
-```
- /**
-     *  To support the table properties for writer
-     *
-     * @param key properties key
-     * @param value properties value
-     */
-    void withTableProperty(char *key, char *value);
-```
-
-```
-    /**
-     * To support the load options for C++ sdk writer
-     *
-     * @param options key,value pair of load options.
-     * supported keys values are
-     * a. bad_records_logger_enable -- true (write into separate logs), false
-     * b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
-     * c. bad_record_path -- path
-     * d. dateformat -- same as JAVA SimpleDateFormat
-     * e. timestampformat -- same as JAVA SimpleDateFormat
-     * f. complex_delimiter_level_1 -- value to Split the complexTypeData
-     * g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
-     * h. quotechar
-     * i. escapechar
-     *
-     * Default values are as follows.
-     *
-     * a. bad_records_logger_enable -- "false"
-     * b. bad_records_action -- "FAIL"
-     * c. bad_record_path -- ""
-     * d. dateformat -- "" , uses from carbon.properties file
-     * e. timestampformat -- "", uses from carbon.properties file
-     * f. complex_delimiter_level_1 -- "$"
-     * g. complex_delimiter_level_2 -- ":"
-     * h. quotechar -- "\""
-     * i. escapechar -- "\\"
-     *
-     * @return updated CarbonWriterBuilder
-     */
-    void withLoadOption(char *key, char *value);
+/**
+ * Create a CarbonWriterBuilder object for building carbonWriter;
+ * the CarbonWriterBuilder object can configure different parameters
+ *
+ * @param env JNIEnv
+ * @return CarbonWriterBuilder object
+ */
+void builder(JNIEnv *env);
+```
+
+```
+/**
+ * Sets the output path of the writer builder
+ *
+ * @param path is the absolute path where output files are written
+ * This method must be called when building CarbonWriterBuilder
+ * @return updated CarbonWriterBuilder
+ */
+void outputPath(char *path);
+```
+
+```
+/**
+ * Sets the list of columns that need to be in sorted order
+ *
+ * @param argc argument counter, the number of sort columns
+ * @param argv is a string array of columns that need to be sorted.
+ *                  If it is null, by default all dimensions are selected for sorting
+ *                  If it is an empty array, no columns are sorted
+ */
+void sortBy(int argc, char *argv[]);
+```
+
+```
+/**
+ * Configure the schema with json style schema
+ *
+ * @param jsonSchema json style schema
+ * @return updated CarbonWriterBuilder
+ */
+void withCsvInput(char *jsonSchema);
+```
+
+```
+/**
+ * Updates the hadoop configuration with the given key value
+ *
+ * @param key key word
+ * @param value value
+ * @return CarbonWriterBuilder object
+ */
+void withHadoopConf(char *key, char *value);
+```
+
+```
+/**
+ * To support the table properties for writer
+ *
+ * @param key properties key
+ * @param value properties value
+ */
+void withTableProperty(char *key, char *value);
+```
+
+```
+/**
+ * To support the load options for C++ sdk writer
+ *
+ * @param options key,value pair of load options.
+ * supported keys values are
+ * a. bad_records_logger_enable -- true (write into separate logs), false
+ * b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
+ * c. bad_record_path -- path
+ * d. dateformat -- same as JAVA SimpleDateFormat
+ * e. timestampformat -- same as JAVA SimpleDateFormat
+ * f. complex_delimiter_level_1 -- value to Split the complexTypeData
+ * g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
+ * h. quotechar
+ * i. escapechar
+ *
+ * Default values are as follows.
+ *
+ * a. bad_records_logger_enable -- "false"
+ * b. bad_records_action -- "FAIL"
+ * c. bad_record_path -- ""
+ * d. dateformat -- "" , uses from carbon.properties file
+ * e. timestampformat -- "", uses from carbon.properties file
+ * f. complex_delimiter_level_1 -- "$"
+ * g. complex_delimiter_level_2 -- ":"
+ * h. quotechar -- "\""
+ * i. escapechar -- "\\"
+ *
+ * @return updated CarbonWriterBuilder
+ */
+void withLoadOption(char *key, char *value);
 ```
 
 ```
-    /**
-     * sets the taskNo for the writer. CSDKs concurrently running
-     * will set taskNo in order to avoid conflicts in file's name during write.
-     *
-     * @param taskNo is the TaskNo user wants to specify.
-     *               by default it is system time in nano seconds.
-     */
-    void taskNo(long taskNo);
+/**
+ * Sets the taskNo for the writer. CSDKs concurrently running
+ * will set taskNo in order to avoid conflicts in file's name during write.
+ *
+ * @param taskNo is the TaskNo user wants to specify.
+ *               by default it is system time in nano seconds.
+ */
+void taskNo(long taskNo);
 ```
 
 ```
-    /**
-     * to set the timestamp in the carbondata and carbonindex index files
-     *
-     * @param timestamp is a timestamp to be used in the carbondata and carbonindex index files.
-     * By default set to zero.
-     * @return updated CarbonWriterBuilder
-     */
-    void uniqueIdentifier(long timestamp);
+/**
+ * Set the timestamp in the carbondata and carbonindex index files
+ *
+ * @param timestamp is a timestamp to be used in the carbondata and carbonindex index files.
+ * By default set to zero.
+ * @return updated CarbonWriterBuilder
+ */
+void uniqueIdentifier(long timestamp);
 ```
 
 ```
-    /**
-     * To make c++ sdk writer thread safe.
-     *
-     * @param numOfThreads should number of threads in which writer is called in multi-thread scenario
-     *                      default C++ sdk writer is not thread safe.
-     *                      can use one writer instance in one thread only.
-     */
-    void withThreadSafe(short numOfThreads) ;
+/**
+ * To make c++ sdk writer thread safe.
+ *
+ * @param numOfThreads number of threads in which the writer is called in a multi-thread scenario;
+ *                      by default the C++ sdk writer is not thread safe,
+ *                      one writer instance can be used in one thread only.
+ */
+void withThreadSafe(short numOfThreads) ;
 ```
 
 ```
-    /**
-     * To set the carbondata file size in MB between 1MB-2048MB
-     *
-     * @param blockSize is size in MB between 1MB to 2048 MB
-     * default value is 1024 MB
-     */
-    void withBlockSize(int blockSize);
+/**
+ * To set the carbondata file size in MB between 1MB-2048MB
+ *
+ * @param blockSize is size in MB between 1MB to 2048 MB
+ * default value is 1024 MB
+ */
+void withBlockSize(int blockSize);
 ```
 
 ```
-    /**
-     * To set the blocklet size of CarbonData file
-     *
-     * @param blockletSize is blocklet size in MB
-     *        default value is 64 MB
-     * @return updated CarbonWriterBuilder
-     */
-    void withBlockletSize(int blockletSize);
+/**
+ * To set the blocklet size of CarbonData file
+ *
+ * @param blockletSize is blocklet size in MB
+ *        default value is 64 MB
+ * @return updated CarbonWriterBuilder
+ */
+void withBlockletSize(int blockletSize);
 ```
 
 ```
-    /**
-     * @param localDictionaryThreshold is localDictionaryThreshold, default is 10000
-     * @return updated CarbonWriterBuilder
-     */
-    void localDictionaryThreshold(int localDictionaryThreshold);
+/**
+ * @param localDictionaryThreshold is localDictionaryThreshold, default is 10000
+ * @return updated CarbonWriterBuilder
+ */
+void localDictionaryThreshold(int localDictionaryThreshold);
 ```
 
 ```
-    /**
-     * @param enableLocalDictionary enable local dictionary, default is false
-     * @return updated CarbonWriterBuilder
-     */
-    void enableLocalDictionary(bool enableLocalDictionary);
+/**
+ * @param enableLocalDictionary enable local dictionary, default is false
+ * @return updated CarbonWriterBuilder
+ */
+void enableLocalDictionary(bool enableLocalDictionary);
 ```
 
 ```
-    /**
-     * @param appName appName which is writing the carbondata files
-     */
-    void writtenBy(char *appName);
+/**
+ * @param appName appName which is writing the carbondata files
+ */
+void writtenBy(char *appName);
 ```
 
 ```
-    /**
-     * build carbonWriter object for writing data
-     * it support write data from load disk
-     *
-     * @return carbonWriter object
-     */
-    void build();
+/**
+ * Build carbonWriter object for writing data;
+ * it supports writing data to local disk
+ *
+ * @return carbonWriter object
+ */
+void build();
 ```
 
 ```
-    /**
-     * Write an object to the file, the format of the object depends on the
-     * implementation.
-     * Note: This API is not thread safe
-     */
-    void write(jobject obj);
+/**
+ * Write an object to the file, the format of the object depends on the
+ * implementation.
+ * Note: This API is not thread safe
+ */
+void write(jobject obj);
 ```
 
 ```
-    /**
-     * close the carbon Writer
-     */
-    void close();
+/**
+ * Close the carbon writer
+ */
+void close();
 ```
 
 ### CarbonSchemaReader
 
 ```
-    /**
-     * constructor with jni env
-     *
-     * @param env  jni env
-     */
-    CarbonSchemaReader(JNIEnv *env);
+/**
+ * Constructor with jni env
+ *
+ * @param env  jni env
+ */
+CarbonSchemaReader(JNIEnv *env);
 ```
 
 ```
-    /**
-     * read schema from path,
-     * path can be folder path, carbonindex file path, and carbondata file path
-     * and will not check all files schema
-     *
-     * @param path file/folder path
-     * @return schema
-     */
-    jobject readSchema(char *path);
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and will not check all files schema
+ *
+ * @param path file/folder path
+ * @return schema
+ */
+jobject readSchema(char *path);
 ```
 
 ```
-    /**
-     *  read schema from path,
-     *  path can be folder path, carbonindex file path, and carbondata file path
-     *  and user can decide whether check all files schema
-     *
-     * @param path carbon data path
-     * @param validateSchema whether check all files schema
-     * @return schema
-     */
-    jobject readSchema(char *path, bool validateSchema);
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and user can decide whether check all files schema
+ *
+ * @param path carbon data path
+ * @param validateSchema whether check all files schema
+ * @return schema
+ */
+jobject readSchema(char *path, bool validateSchema);
 ```
 
 ```
-    /**
-     * read schema from path,
-     * path can be folder path, carbonindex file path, and carbondata file path
-     * and will not check all files schema
-     *
-     * @param path file/folder path
-     * @param conf           configuration support, can set s3a AK,SK,
-     *                       end point and other conf with this
-     * @return schema
-     */
-    jobject readSchema(char *path, Configuration conf);
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and will not check all files schema
+ *
+ * @param path file/folder path
+ * @param conf           configuration support, can set s3a AK,SK,
+ *                       end point and other conf with this
+ * @return schema
+ */
+jobject readSchema(char *path, Configuration conf);
 ```
 
 ```
-    /**
-     *  read schema from path,
-     *  path can be folder path, carbonindex file path, and carbondata file path
-     *  and user can decide whether check all files schema
-     *
-     * @param path carbon data path
-     * @param validateSchema whether check all files schema
-     * @param conf           configuration support, can set s3a AK,SK,
-     *                       end point and other conf with this
-     * @return schema
-     */
-    jobject readSchema(char *path, bool validateSchema, Configuration conf);
-
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and user can decide whether check all files schema
+ *
+ * @param path carbon data path
+ * @param validateSchema whether check all files schema
+ * @param conf           configuration support, can set s3a AK,SK,
+ *                       end point and other conf with this
+ * @return schema
+ */
+jobject readSchema(char *path, bool validateSchema, Configuration conf);
 ```
 
 ### Schema
 ```
- /**
-     * constructor with jni env and carbon schema data
-     *
-     * @param env jni env
-     * @param schema  carbon schema data
-     */
-    Schema(JNIEnv *env, jobject schema);
+/**
+ * Constructor with jni env and carbon schema data
+ *
+ * @param env jni env
+ * @param schema  carbon schema data
+ */
+Schema(JNIEnv *env, jobject schema);
 ```
 
 ```
-    /**
-     * get fields length of schema
-     *
-     * @return fields length
-     */
-    int getFieldsLength();
+/**
+ * Get fields length of schema
+ *
+ * @return fields length
+ */
+int getFieldsLength();
 ```
 
 ```
-    /**
-     * get field name by ordinal
-     *
-     * @param ordinal the data index of carbon schema
-     * @return ordinal field name
-     */
-    char *getFieldName(int ordinal);
+/**
+ * Get field name by ordinal
+ *
+ * @param ordinal the data index of carbon schema
+ * @return ordinal field name
+ */
+char *getFieldName(int ordinal);
 ```
 
 ```
-    /**
-     * get  field data type name by ordinal
-     *
-     * @param ordinal the data index of carbon schema
-     * @return ordinal field data type name
-     */
-    char *getFieldDataTypeName(int ordinal);
+/**
+ * Get field data type name by ordinal
+ *
+ * @param ordinal the data index of carbon schema
+ * @return ordinal field data type name
+ */
+char *getFieldDataTypeName(int ordinal);
 ```
 
 ```
-    /**
-     * get  array child element data type name by ordinal
-     *
-     * @param ordinal the data index of carbon schema
-     * @return ordinal array child element data type name
-     */
-    char *getArrayElementTypeName(int ordinal);
+/**
+ * Get array child element data type name by ordinal
+ *
+ * @param ordinal the data index of carbon schema
+ * @return ordinal array child element data type name
+ */
+char *getArrayElementTypeName(int ordinal);
 ```
 
 ### CarbonProperties
 ```
-  /**
-     * Constructor of CarbonProperties
-     *
-     * @param env JNI env
-     */
-    CarbonProperties(JNIEnv *env);
+/**
+ * Constructor of CarbonProperties
+ *
+ * @param env JNI env
+ */
+CarbonProperties(JNIEnv *env);
 ```
 
 ```
-    /**
-     * This method will be used to add a new property
-     * 
-     * @param key property key
-     * @param value property value
-     * @return CarbonProperties object
-     */
-    jobject addProperty(char *key, char *value);
+/**
+ * This method will be used to add a new property
+ * 
+ * @param key property key
+ * @param value property value
+ * @return CarbonProperties object
+ */
+jobject addProperty(char *key, char *value);
 ```
 
 ```
-    /**
-     * This method will be used to get the properties value
-     *
-     * @param key  property key
-     * @return  property value
-     */
-    char *getProperty(char *key);
+/**
+ * This method will be used to get the properties value
+ *
+ * @param key property key
+ * @return property value
+ */
+char *getProperty(char *key);
 ```
 
 ```
-    /**
-     * This method will be used to get the properties value
-     * if property is not present then it will return the default value
-     *
-     * @param key  property key
-     * @param defaultValue  property default Value
-     * @return
-     */
-    char *getProperty(char *key, char *defaultValue);
+/**
+ * This method will be used to get the properties value
+ * if property is not present then it will return the default value
+ *
+ * @param key  property key
+ * @param defaultValue  property default Value
+ * @return property value; the default value if the property is not present
+ */
+char *getProperty(char *key, char *defaultValue);
 ```

http://git-wip-us.apache.org/repos/asf/carbondata/blob/81bf02f5/docs/sdk-guide.md
----------------------------------------------------------------------
diff --git a/docs/sdk-guide.md b/docs/sdk-guide.md
index dc1fe46..573b595 100644
--- a/docs/sdk-guide.md
+++ b/docs/sdk-guide.md
@@ -39,49 +39,49 @@ These SDK writer output contains just carbondata and carbonindex files. No metad
 ### Example with csv format 
 
 ```java
- import java.io.IOException;
- 
- import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
- import org.apache.carbondata.core.metadata.datatype.DataTypes;
- import org.apache.carbondata.core.util.CarbonProperties;
- import org.apache.carbondata.sdk.file.CarbonWriter;
- import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
- import org.apache.carbondata.sdk.file.Field;
- import org.apache.carbondata.sdk.file.Schema;
- 
- public class TestSdk {
-
-   // pass true or false while executing the main to use offheap memory or not
-   public static void main(String[] args) throws IOException, InvalidLoadOptionException {
-     if (args.length > 0 && args[0] != null) {
-       testSdkWriter(args[0]);
-     } else {
-       testSdkWriter("true");
-     }
-   }
- 
-   public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
-     String path = "./target/testCSVSdkWriter";
- 
-     Field[] fields = new Field[2];
-     fields[0] = new Field("name", DataTypes.STRING);
-     fields[1] = new Field("age", DataTypes.INT);
- 
-     Schema schema = new Schema(fields);
+import java.io.IOException;
 
-     CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
- 
-     CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path).withCsvInput(schema).writtenBy("SDK");
- 
-     CarbonWriter writer = builder.build();
- 
-     int rows = 5;
-     for (int i = 0; i < rows; i++) {
-       writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
-     }
-     writer.close();
-   }
- }
+import org.apache.carbondata.common.exceptions.sql.InvalidLoadOptionException;
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.sdk.file.CarbonWriter;
+import org.apache.carbondata.sdk.file.CarbonWriterBuilder;
+import org.apache.carbondata.sdk.file.Field;
+import org.apache.carbondata.sdk.file.Schema;
+
+public class TestSdk {
+
+  // pass true or false while executing the main to use offheap memory or not
+  public static void main(String[] args) throws IOException, InvalidLoadOptionException {
+    if (args.length > 0 && args[0] != null) {
+      testSdkWriter(args[0]);
+    } else {
+      testSdkWriter("true");
+    }
+  }
+
+  public static void testSdkWriter(String enableOffheap) throws IOException, InvalidLoadOptionException {
+    String path = "./target/testCSVSdkWriter";
+
+    Field[] fields = new Field[2];
+    fields[0] = new Field("name", DataTypes.STRING);
+    fields[1] = new Field("age", DataTypes.INT);
+
+    Schema schema = new Schema(fields);
+
+    CarbonProperties.getInstance().addProperty("enable.offheap.sort", enableOffheap);
+
+    CarbonWriterBuilder builder = CarbonWriter.builder().outputPath(path).withCsvInput(schema).writtenBy("SDK");
+
+    CarbonWriter writer = builder.build();
+
+    int rows = 5;
+    for (int i = 0; i < rows; i++) {
+      writer.write(new String[] { "robot" + (i % 10), String.valueOf(i) });
+    }
+    writer.close();
+  }
+}
 ```
 
 ### Example with Avro format
@@ -251,248 +251,255 @@ Find example code at [DirectSQLExample](https://github.com/apache/carbondata/blo
 ### Class org.apache.carbondata.sdk.file.CarbonWriterBuilder
 ```
 /**
-* Sets the output path of the writer builder
-* @param path is the absolute path where output files are written
-*             This method must be called when building CarbonWriterBuilder
-* @return updated CarbonWriterBuilder
-*/
+ * Sets the output path of the writer builder
+ *
+ * @param path is the absolute path where output files are written
+ *             This method must be called when building CarbonWriterBuilder
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder outputPath(String path);
 ```
 
 ```
 /**
-* to set the timestamp in the carbondata and carbonindex index files
-* @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
-*             By default set to zero.
-* @return updated CarbonWriterBuilder
-*/
+ * To set the timestamp in the carbondata and carbonindex index files
+ *
+ * @param UUID is a timestamp to be used in the carbondata and carbonindex index files.
+ *             By default set to zero.
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder uniqueIdentifier(long UUID);
 ```
 
 ```
 /**
-* To set the carbondata file size in MB between 1MB-2048MB
-* @param blockSize is size in MB between 1MB to 2048 MB
-*                  default value is 1024 MB
-* @return updated CarbonWriterBuilder
-*/
+ * To set the carbondata file size in MB between 1MB-2048MB
+ *
+ * @param blockSize is size in MB between 1MB to 2048 MB
+ *                  default value is 1024 MB
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withBlockSize(int blockSize);
 ```
 
 ```
 /**
-* To set the blocklet size of carbondata file
-* @param blockletSize is blocklet size in MB
-*                     default value is 64 MB
-* @return updated CarbonWriterBuilder
-*/
+ * To set the blocklet size of carbondata file
+ *
+ * @param blockletSize is blocklet size in MB
+ *                     default value is 64 MB
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withBlockletSize(int blockletSize);
 ```
 
 ```
 /**
-   * @param enableLocalDictionary enable local dictionary  , default is false
-   * @return updated CarbonWriterBuilder
-   */
+ * @param enableLocalDictionary enable local dictionary  , default is false
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder enableLocalDictionary(boolean enableLocalDictionary);
 ```
 
 ```
 /**
-   * @param localDictionaryThreshold is localDictionaryThreshold,default is 10000
-   * @return updated CarbonWriterBuilder
-   */
+ * @param localDictionaryThreshold is localDictionaryThreshold,default is 10000
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder localDictionaryThreshold(int localDictionaryThreshold) ;
 ```
 
 
 ```
 /**
-* sets the list of columns that needs to be in sorted order
-* @param sortColumns is a string array of columns that needs to be sorted.
-*                    If it is null or by default all dimensions are selected for sorting
-*                    If it is empty array, no columns are sorted
-* @return updated CarbonWriterBuilder
-*/
+ * Sets the list of columns that need to be in sorted order
+ *
+ * @param sortColumns is a string array of columns that need to be sorted.
+ *                    If it is null, by default all dimensions are selected for sorting
+ *                    If it is an empty array, no columns are sorted
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder sortBy(String[] sortColumns);
 ```
 
 ```
 /**
-* sets the taskNo for the writer. SDKs concurrently running
-* will set taskNo in order to avoid conflicts in file's name during write.
-* @param taskNo is the TaskNo user wants to specify.
-*               by default it is system time in nano seconds.
-* @return updated CarbonWriterBuilder
-*/
+ * Sets the taskNo for the writer. SDKs concurrently running
+ * will set taskNo in order to avoid conflicts in file's name during write.
+ *
+ * @param taskNo is the TaskNo user wants to specify.
+ *               by default it is system time in nano seconds.
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder taskNo(long taskNo);
 ```
 
 ```
 /**
-* To support the load options for sdk writer
-* @param options key,value pair of load options.
-*                supported keys values are
-*                a. bad_records_logger_enable -- true (write into separate logs), false
-*                b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
-*                c. bad_record_path -- path
-*                d. dateformat -- same as JAVA SimpleDateFormat
-*                e. timestampformat -- same as JAVA SimpleDateFormat
-*                f. complex_delimiter_level_1 -- value to Split the complexTypeData
-*                g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
-*                h. quotechar
-*                i. escapechar
-*                
-*                Default values are as follows.
-*
-*                a. bad_records_logger_enable -- "false"
-*                b. bad_records_action -- "FAIL"
-*                c. bad_record_path -- ""
-*                d. dateformat -- "" , uses from carbon.properties file
-*                e. timestampformat -- "", uses from carbon.properties file
-*                f. complex_delimiter_level_1 -- "$"
-*                g. complex_delimiter_level_2 -- ":"
-*                h. quotechar -- "\""
-*                i. escapechar -- "\\"
-*
-* @return updated CarbonWriterBuilder
-*/
+ * To support the load options for sdk writer
+ * @param options key,value pair of load options.
+ *                supported keys values are
+ *                a. bad_records_logger_enable -- true (write into separate logs), false
+ *                b. bad_records_action -- FAIL, FORCE, IGNORE, REDIRECT
+ *                c. bad_record_path -- path
+ *                d. dateformat -- same as JAVA SimpleDateFormat
+ *                e. timestampformat -- same as JAVA SimpleDateFormat
+ *                f. complex_delimiter_level_1 -- value to Split the complexTypeData
+ *                g. complex_delimiter_level_2 -- value to Split the nested complexTypeData
+ *                h. quotechar
+ *                i. escapechar
+ *                
+ *                Default values are as follows.
+ *
+ *                a. bad_records_logger_enable -- "false"
+ *                b. bad_records_action -- "FAIL"
+ *                c. bad_record_path -- ""
+ *                d. dateformat -- "" , uses from carbon.properties file
+ *                e. timestampformat -- "", uses from carbon.properties file
+ *                f. complex_delimiter_level_1 -- "$"
+ *                g. complex_delimiter_level_2 -- ":"
+ *                h. quotechar -- "\""
+ *                i. escapechar -- "\\"
+ *
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withLoadOptions(Map<String, String> options);
 ```
 
 ```
 /**
-* To support the table properties for sdk writer
-*
-* @param options key,value pair of create table properties.
-* supported keys values are
-* a. table_blocksize -- [1-2048] values in MB. Default value is 1024
-* b. table_blocklet_size -- values in MB. Default value is 64 MB
-* c. local_dictionary_threshold -- positive value, default is 10000
-* d. local_dictionary_enable -- true / false. Default is false
-* e. sort_columns -- comma separated column. "c1,c2". Default no columns are sorted.
-* j. sort_scope -- "local_sort", "no_sort", "batch_sort". default value is "no_sort"
-* k. long_string_columns -- comma separated string columns which are more than 32k length. 
-*                           default value is null.
-* l. inverted_index -- comma separated string columns for which inverted index needs to be
-*                      generated
-*
-* @return updated CarbonWriterBuilder
-*/
+ * To support the table properties for sdk writer
+ *
+ * @param options key,value pair of create table properties.
+ * supported keys values are
+ * a. table_blocksize -- [1-2048] values in MB. Default value is 1024
+ * b. table_blocklet_size -- values in MB. Default value is 64 MB
+ * c. local_dictionary_threshold -- positive value, default is 10000
+ * d. local_dictionary_enable -- true / false. Default is false
+ * e. sort_columns -- comma separated column. "c1,c2". Default no columns are sorted.
+ * j. sort_scope -- "local_sort", "no_sort", "batch_sort". default value is "no_sort"
+ * k. long_string_columns -- comma separated string columns which are more than 32k length. 
+ *                           default value is null.
+ * l. inverted_index -- comma separated string columns for which inverted index needs to be
+ *                      generated
+ *
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withTableProperties(Map<String, String> options);
 ```
 
 ```
 /**
-* To make sdk writer thread safe.
-*
-* @param numOfThreads should number of threads in which writer is called in multi-thread scenario
-*                     default sdk writer is not thread safe.
-*                     can use one writer instance in one thread only.
-* @return updated CarbonWriterBuilder
-*/
+ * To make sdk writer thread safe.
+ *
+ * @param numOfThreads number of threads in which the writer is called in a multi-thread scenario;
+ *                     by default the sdk writer is not thread safe,
+ *                     one writer instance can be used in one thread only.
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withThreadSafe(short numOfThreads);
 ```
 
 ```
 /**
-* To support hadoop configuration
-*
-* @param conf hadoop configuration support, can set s3a AK,SK,end point and other conf with this
-* @return updated CarbonWriterBuilder
-*/
+ * To support hadoop configuration
+ *
+ * @param conf hadoop configuration support, can set s3a AK,SK,end point and other conf with this
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withHadoopConf(Configuration conf)
 ```
 
 ```
-  /**
-   * Updates the hadoop configuration with the given key value
-   *
-   * @param key   key word
-   * @param value value
-   * @return this object
-   */
-  public CarbonWriterBuilder withHadoopConf(String key, String value);
+/**
+ * Updates the hadoop configuration with the given key value
+ *
+ * @param key   key word
+ * @param value value
+ * @return this object
+ */
+public CarbonWriterBuilder withHadoopConf(String key, String value);
 ```
 
 ```
 /**
-* to build a {@link CarbonWriter}, which accepts row in CSV format
-*
-* @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
-* @return CarbonWriterBuilder
-*/
+ * To build a {@link CarbonWriter}, which accepts row in CSV format
+ *
+ * @param schema carbon Schema object {org.apache.carbondata.sdk.file.Schema}
+ * @return CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withCsvInput(Schema schema);
 ```
 
 ```
 /**
-* to build a {@link CarbonWriter}, which accepts Avro object
-*
-* @param avroSchema avro Schema object {org.apache.avro.Schema}
-* @return CarbonWriterBuilder
-*/
+ * To build a {@link CarbonWriter}, which accepts Avro object
+ *
+ * @param avroSchema avro Schema object {org.apache.avro.Schema}
+ * @return CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withAvroInput(org.apache.avro.Schema avroSchema);
 ```
 
 ```
 /**
-* to build a {@link CarbonWriter}, which accepts Json object
-*
-* @param carbonSchema carbon Schema object
-* @return CarbonWriterBuilder
-*/
+ * To build a {@link CarbonWriter}, which accepts Json object
+ *
+ * @param carbonSchema carbon Schema object
+ * @return CarbonWriterBuilder
+ */
 public CarbonWriterBuilder withJsonInput(Schema carbonSchema);
 ```
 
 ```
 /**
-* To support writing the ApplicationName which is writing the carbondata file
-* This is a mandatory API to call, else the build() call will fail with error.
-* @param application name which is writing the carbondata files
-* @return CarbonWriterBuilder
-*/
+ * To support writing the ApplicationName which is writing the carbondata file
+ * This is a mandatory API to call, else the build() call will fail with an error.
+ * @param appName application name which is writing the carbondata files
+ * @return CarbonWriterBuilder
+ */
 public CarbonWriterBuilder writtenBy(String appName) {
 ```
 
 ```
 /**
-* sets the list of columns for which inverted index needs to generated
-* @param invertedIndexColumns is a string array of columns for which inverted index needs to
-* generated.
-* If it is null or an empty array, inverted index will be generated for none of the columns
-* @return updated CarbonWriterBuilder
-*/
+ * Sets the list of columns for which inverted index needs to be generated
+ *
+ * @param invertedIndexColumns is a string array of columns for which inverted index needs to
+ * be generated.
+ * If it is null or an empty array, inverted index will be generated for none of the columns
+ * @return updated CarbonWriterBuilder
+ */
 public CarbonWriterBuilder invertedIndexFor(String[] invertedIndexColumns);
 ```
 
 ```
 /**
-* Build a {@link CarbonWriter}
-* This writer is not thread safe,
-* use withThreadSafe() configuration in multi thread environment
-* 
-* @return CarbonWriter {AvroCarbonWriter/CSVCarbonWriter/JsonCarbonWriter based on Input Type }
-* @throws IOException
-* @throws InvalidLoadOptionException
-*/
+ * Build a {@link CarbonWriter}
+ * This writer is not thread safe,
+ * use withThreadSafe() configuration in multi thread environment
+ * 
+ * @return CarbonWriter {AvroCarbonWriter/CSVCarbonWriter/JsonCarbonWriter based on Input Type }
+ * @throws IOException
+ * @throws InvalidLoadOptionException
+ */
 public CarbonWriter build() throws IOException, InvalidLoadOptionException;
 ```
 
 ```
- /**
-   * Configure Row Record Reader for reading.
-   *
-   */
-  public CarbonReaderBuilder withRowRecordReader()
+/**
+ * Configure Row Record Reader for reading.
+ *
+ */
+public CarbonReaderBuilder withRowRecordReader()
 ```
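
As a quick illustration of how these builder calls chain together (a sketch, not part of this patch; the output path and property values are placeholders picked from the option lists above):

```java
Map<String, String> tableProperties = new HashMap<>();
tableProperties.put("sort_columns", "name");             // keys from withTableProperties above
tableProperties.put("table_blocksize", "256");

Map<String, String> loadOptions = new HashMap<>();
loadOptions.put("bad_records_action", "REDIRECT");       // keys from withLoadOptions above
loadOptions.put("bad_record_path", "./target/badrecords");

Schema schema = new Schema(new Field[]{
    new Field("name", DataTypes.STRING),
    new Field("age", DataTypes.INT)});

CarbonWriter writer = CarbonWriter.builder()
    .outputPath("./target/propertiesWriterOutput")
    .withTableProperties(tableProperties)
    .withLoadOptions(loadOptions)
    .withCsvInput(schema)
    .writtenBy("SDK")
    .build();

writer.write(new String[]{"robot0", "0"});
writer.close();
```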
 
 ### Class org.apache.carbondata.sdk.file.CarbonWriter
 
 ```
 /**
-* Create a {@link CarbonWriterBuilder} to build a {@link CarbonWriter}
-*/
+ * Create a {@link CarbonWriterBuilder} to build a {@link CarbonWriter}
+ */
 public static CarbonWriterBuilder builder() {
     return new CarbonWriterBuilder();
 }
@@ -500,40 +507,43 @@ public static CarbonWriterBuilder builder() {
 
 ```
 /**
-* Write an object to the file, the format of the object depends on the implementation
-* If AvroCarbonWriter, object is of type org.apache.avro.generic.GenericData.Record, 
-*                      which is one row of data.
-* If CSVCarbonWriter, object is of type String[], which is one row of data
-* If JsonCarbonWriter, object is of type String, which is one row of json
-* @param object
-* @throws IOException
-*/
+ * Write an object to the file, the format of the object depends on the implementation
+ * If AvroCarbonWriter, object is of type org.apache.avro.generic.GenericData.Record, 
+ *                      which is one row of data.
+ * If CSVCarbonWriter, object is of type String[], which is one row of data
+ * If JsonCarbonWriter, object is of type String, which is one row of json
+ *
+ * @param object
+ * @throws IOException
+ */
 public abstract void write(Object object) throws IOException;
 ```
 
 ```
 /**
-* Flush and close the writer
-*/
+ * Flush and close the writer
+ */
 public abstract void close() throws IOException;
 ```
 
 ### Class org.apache.carbondata.sdk.file.Field
 ```
 /**
-* Field Constructor
-* @param name name of the field
-* @param type datatype of field, specified in strings.
-*/
+ * Field Constructor
+ *
+ * @param name name of the field
+ * @param type datatype of field, specified in strings.
+ */
 public Field(String name, String type);
 ```
 
 ```
 /**
-* Field constructor
-* @param name name of the field
-* @param type datatype of the field of class DataType
-*/
+ * Field constructor
+ *
+ * @param name name of the field
+ * @param type datatype of the field of class DataType
+ */
 public Field(String name, DataType type);  
 ```
 
@@ -541,33 +551,34 @@ public Field(String name, DataType type);
 
 ```
 /**
-* construct a schema with fields
-* @param fields
-*/
+ * Construct a schema with fields
+ *
+ * @param fields
+ */
 public Schema(Field[] fields);
 ```
 
 ```
 /**
-* Create a Schema using JSON string, for example:
-* [
-*   {"name":"string"},
-*   {"age":"int"}
-* ] 
-* @param json specified as string
-* @return Schema
-*/
+ * Create a Schema using JSON string, for example:
+ * [
+ *   {"name":"string"},
+ *   {"age":"int"}
+ * ] 
+ * @param json specified as string
+ * @return Schema
+ */
 public static Schema parseJson(String json);
 ```
 
 ### Class org.apache.carbondata.sdk.file.AvroCarbonWriter
 ```
 /**
-* converts avro schema to carbon schema, required by carbonWriter
-*
-* @param avroSchemaString json formatted avro schema as string
-* @return carbon sdk schema
-*/
+ * Converts avro schema to carbon schema, required by carbonWriter
+ *
+ * @param avroSchemaString json formatted avro schema as string
+ * @return carbon sdk schema
+ */
 public static org.apache.carbondata.sdk.file.Schema getCarbonSchemaFromAvroSchema(String avroSchemaString);
 ```
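
A tiny sketch of the Avro-to-Carbon schema conversion (illustrative only, not from this patch; the Avro schema string is a made-up example):

```java
String avroSchema = "{\"type\":\"record\",\"name\":\"person\",\"fields\":"
    + "[{\"name\":\"name\",\"type\":\"string\"},{\"name\":\"age\",\"type\":\"int\"}]}";

// Convert the Avro schema (a JSON string) into a carbon SDK schema
Schema carbonSchema = AvroCarbonWriter.getCarbonSchemaFromAvroSchema(avroSchema);
```
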
 # SDK Reader
@@ -575,28 +586,28 @@ This SDK reader reads CarbonData file and carbonindex file at a given path.
 External client can make use of this reader to read CarbonData files without CarbonSession.
 ## Quick example
 ```
-    // 1. Create carbon reader
-    String path = "./testWriteFiles";
-    CarbonReader reader = CarbonReader
-        .builder(path, "_temp")
-        .projection(new String[]{"stringField", "shortField", "intField", "longField", 
-                "doubleField", "boolField", "dateField", "timeField", "decimalField"})
-        .build();
-
-    // 2. Read data
-    long day = 24L * 3600 * 1000;
-    int i = 0;
-    while (reader.hasNext()) {
-        Object[] row = (Object[]) reader.readNextRow();
-        System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
-            i, row[0], row[1], row[2], row[3], row[4], row[5],
-            new Date((day * ((int) row[6]))), new Timestamp((long) row[7] / 1000), row[8]
-        ));
-        i++;
-    }
+// 1. Create carbon reader
+String path = "./testWriteFiles";
+CarbonReader reader = CarbonReader
+    .builder(path, "_temp")
+    .projection(new String[]{"stringField", "shortField", "intField", "longField", 
+            "doubleField", "boolField", "dateField", "timeField", "decimalField"})
+    .build();
+
+// 2. Read data
+long day = 24L * 3600 * 1000;
+int i = 0;
+while (reader.hasNext()) {
+    Object[] row = (Object[]) reader.readNextRow();
+    System.out.println(String.format("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t",
+        i, row[0], row[1], row[2], row[3], row[4], row[5],
+        new Date((day * ((int) row[6]))), new Timestamp((long) row[7] / 1000), row[8]
+    ));
+    i++;
+}
 
-    // 3. Close this reader
-    reader.close();
+// 3. Close this reader
+reader.close();
 ```
 
 Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/CarbonReaderExample.java) in the CarbonData repo.
@@ -605,114 +616,114 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
 
 ### Class org.apache.carbondata.sdk.file.CarbonReader
 ```
-   /**
-    * Return a new {@link CarbonReaderBuilder} instance
-    *
-    * @param tablePath table store path
-    * @param tableName table name
-    * @return CarbonReaderBuilder object
-    */
-  public static CarbonReaderBuilder builder(String tablePath, String tableName);
+/**
+ * Return a new {@link CarbonReaderBuilder} instance
+ *
+ * @param tablePath table store path
+ * @param tableName table name
+ * @return CarbonReaderBuilder object
+ */
+public static CarbonReaderBuilder builder(String tablePath, String tableName);
 ```
 
 ```
-  /**
-   * Return a new CarbonReaderBuilder instance
-   * Default value of table name is table + tablePath + time
-   *
-   * @param tablePath table path
-   * @return CarbonReaderBuilder object
-   */
-  public static CarbonReaderBuilder builder(String tablePath);
+/**
+ * Return a new CarbonReaderBuilder instance
+ * Default value of table name is table + tablePath + time
+ *
+ * @param tablePath table path
+ * @return CarbonReaderBuilder object
+ */
+public static CarbonReaderBuilder builder(String tablePath);
 ```
 
 ```
 /**
-  * Breaks the list of CarbonRecordReader in CarbonReader into multiple
-  * CarbonReader objects, each iterating through some 'carbondata' files
-  * and return that list of CarbonReader objects
-  *
-  * If the no. of files is greater than maxSplits, then break the
-  * CarbonReader into maxSplits splits, with each split iterating
-  * through >= 1 file.
-  *
-  * If the no. of files is less than maxSplits, then return list of
-  * CarbonReader with size as the no. of files, with each CarbonReader
-  * iterating through exactly one file
-  *
-  * @param maxSplits: Int
-  * @return list of CarbonReader objects
-  */
-  public List<CarbonReader> split(int maxSplits);
+ * Breaks the list of CarbonRecordReader in CarbonReader into multiple
+ * CarbonReader objects, each iterating through some 'carbondata' files
+ * and returns that list of CarbonReader objects
+ *
+ * If the no. of files is greater than maxSplits, then break the
+ * CarbonReader into maxSplits splits, with each split iterating
+ * through >= 1 file.
+ *
+ * If the no. of files is less than maxSplits, then return list of
+ * CarbonReader with size as the no. of files, with each CarbonReader
+ * iterating through exactly one file
+ *
+ * @param maxSplits: Int
+ * @return list of CarbonReader objects
+ */
+public List<CarbonReader> split(int maxSplits);
 ```
 
 ```
-  /**
-   * Return true if has next row
-   */
-  public boolean hasNext();
+/**
+ * Return true if has next row
+ */
+public boolean hasNext();
 ```
 
 ```
-  /**
-   * Read and return next row object
-   */
-  public T readNextRow();
+/**
+ * Read and return next row object
+ */
+public T readNextRow();
 ```
 
 ```
-  /**
-   * Read and return next batch row objects
-   */
-  public Object[] readNextBatchRow();
+/**
+ * Read and return next batch row objects
+ */
+public Object[] readNextBatchRow();
 ```
 
 ```
-  /**
-   * Close reader
-   */
-  public void close();
+/**
+ * Close reader
+ */
+public void close();
 ```
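
For reference, a short sketch of batch reading with split() (illustrative, not from this patch; path, table name, and projection columns are placeholders, and the builder methods are the ones documented under CarbonReaderBuilder below):

```java
CarbonReader reader = CarbonReader
    .builder("./testWriteFiles", "_temp")
    .projection(new String[]{"name", "age"})
    .withBatch(1000)
    .build();

// Break the reader into at most 4 readers, each covering a subset of the carbondata files
List<CarbonReader> readers = reader.split(4);
for (CarbonReader r : readers) {
  while (r.hasNext()) {
    Object[] batch = r.readNextBatchRow();   // up to 1000 rows per call
    // process batch ...
  }
  r.close();
}
```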
 
 ### Class org.apache.carbondata.sdk.file.CarbonReaderBuilder
 ```
-  /**
-   * Construct a CarbonReaderBuilder with table path and table name
-   *
-   * @param tablePath table path
-   * @param tableName table name
-   */
-  CarbonReaderBuilder(String tablePath, String tableName);
+/**
+ * Construct a CarbonReaderBuilder with table path and table name
+ *
+ * @param tablePath table path
+ * @param tableName table name
+ */
+CarbonReaderBuilder(String tablePath, String tableName);
 ```
 
 ```
-  /**
-   * Configure the projection column names of carbon reader
-   *
-   * @param projectionColumnNames projection column names
-   * @return CarbonReaderBuilder object
-   */
-  public CarbonReaderBuilder projection(String[] projectionColumnNames);
+/**
+ * Configure the projection column names of carbon reader
+ *
+ * @param projectionColumnNames projection column names
+ * @return CarbonReaderBuilder object
+ */
+public CarbonReaderBuilder projection(String[] projectionColumnNames);
 ```
 
 ```
- /**
-  * Configure the filter expression for carbon reader
-  *
-  * @param filterExpression filter expression
-  * @return CarbonReaderBuilder object
-  */
-  public CarbonReaderBuilder filter(Expression filterExpression);
+/**
+ * Configure the filter expression for carbon reader
+ *
+ * @param filterExpression filter expression
+ * @return CarbonReaderBuilder object
+ */
+public CarbonReaderBuilder filter(Expression filterExpression);
 ```
 
 ```
-  /**
-   * Sets the batch size of records to read
-   *
-   * @param batch batch size
-   * @return updated CarbonReaderBuilder
-   */
-  public CarbonReaderBuilder withBatch(int batch);
+/**
+ * Sets the batch size of records to read
+ *
+ * @param batch batch size
+ * @return updated CarbonReaderBuilder
+ */
+public CarbonReaderBuilder withBatch(int batch);
 ```
 
 ```
@@ -722,193 +733,196 @@ Find example code at [CarbonReaderExample](https://github.com/apache/carbondata/
  * @param conf hadoop configuration support, can set s3a AK,SK,end point and other conf with this
  * @return updated CarbonReaderBuilder
  */
- public CarbonReaderBuilder withHadoopConf(Configuration conf);
+public CarbonReaderBuilder withHadoopConf(Configuration conf);
 ```
 
 ```
-  /**
-   * Updates the hadoop configuration with the given key value
-   *
-   * @param key   key word
-   * @param value value
-   * @return this object
-   */
-  public CarbonReaderBuilder withHadoopConf(String key, String value);
+/**
+ * Updates the hadoop configuration with the given key value
+ *
+ * @param key   key word
+ * @param value value
+ * @return this object
+ */
+public CarbonReaderBuilder withHadoopConf(String key, String value);
 ```
   
 ```
- /**
-   * Build CarbonReader
-   *
-   * @param <T>
-   * @return CarbonReader
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public <T> CarbonReader<T> build();
+/**
+ * Build CarbonReader
+ *
+ * @param <T>
+ * @return CarbonReader
+ * @throws IOException
+ * @throws InterruptedException
+ */
+public <T> CarbonReader<T> build();
 ```
 ### Class org.apache.carbondata.sdk.file.CarbonSchemaReader
 ```
-  /**
-   * Read schema file and return the schema
-   *
-   * @param schemaFilePath complete path including schema file name
-   * @return schema object
-   * @throws IOException
-   */
-  @Deprecated
-  public static Schema readSchemaInSchemaFile(String schemaFilePath);
-```
-
-```
-  /**
-   * Read carbondata file and return the schema
-   *
-   * @param dataFilePath complete path including carbondata file name
-   * @return Schema object
-   */
-  @Deprecated
-  public static Schema readSchemaInDataFile(String dataFilePath);
-```
-
-```
-  /**
-   * Read carbonindex file and return the schema
-   *
-   * @param indexFilePath complete path including index file name
-   * @return schema object
-   * @throws IOException
-   */
-  @Deprecated
-  public static Schema readSchemaInIndexFile(String indexFilePath);
-```
-
-```
-  /**
-   * read schema from path,
-   * path can be folder path,carbonindex file path, and carbondata file path
-   * and will not check all files schema
-   *
-   * @param path file/folder path
-   * @return schema
-   * @throws IOException
-   */
-  public static Schema readSchema(String path);
-```
-
-```
-  /**
-   * read schema from path,
-   * path can be folder path,carbonindex file path, and carbondata file path
-   * and user can decide whether check all files schema
-   *
-   * @param path             file/folder path
-   * @param validateSchema whether check all files schema
-   * @return schema
-   * @throws IOException
-   */
-  public static Schema readSchema(String path, boolean validateSchema);
-```
-
-```
-  /**
-   * read schema from path,
-   * path can be folder path, carbonindex file path, and carbondata file path
-   * and will not check all files schema
-   *
-   * @param path file/folder path
-   * @param conf hadoop configuration support, can set s3a AK,SK,end point and other conf with this
-   * @return schema
-   * @throws IOException
-   */
-  public static Schema readSchema(String path, Configuration conf);
-```
-
-```
-  /**
-   * read schema from path,
-   * path can be folder path, carbonindex file path, and carbondata file path
-   * and user can decide whether check all files schema
-   *
-   * @param path           file/folder path
-   * @param validateSchema whether check all files schema
-   * @param conf           hadoop configuration support, can set s3a AK,SK,
-   *                       end point and other conf with this
-   * @return schema
-   * @throws IOException
-   */
-  public static Schema readSchema(String path, boolean validateSchema, Configuration conf);
-```
-
-```
-  /**
-   * This method return the version details in formatted string by reading from carbondata file
-   * If application name is SDK_1.0.0 and this has written the carbondata file in carbondata 1.6 project version,
-   * then this API returns the String "SDK_1.0.0 in version: 1.6.0-SNAPSHOT"
-   * @param dataFilePath complete path including carbondata file name
-   * @return string with information of who has written this file in which carbondata project version
-   * @throws IOException
-   */
-  public static String getVersionDetails(String dataFilePath);
+/**
+ * Read schema file and return the schema
+ *
+ * @param schemaFilePath complete path including schema file name
+ * @return schema object
+ * @throws IOException
+ */
+@Deprecated
+public static Schema readSchemaInSchemaFile(String schemaFilePath);
+```
+
+```
+/**
+ * Read carbondata file and return the schema
+ *
+ * @param dataFilePath complete path including carbondata file name
+ * @return Schema object
+ */
+@Deprecated
+public static Schema readSchemaInDataFile(String dataFilePath);
+```
+
+```
+/**
+ * Read carbonindex file and return the schema
+ *
+ * @param indexFilePath complete path including index file name
+ * @return schema object
+ * @throws IOException
+ */
+@Deprecated
+public static Schema readSchemaInIndexFile(String indexFilePath);
+```
+
+```
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and will not check all files schema
+ *
+ * @param path file/folder path
+ * @return schema
+ * @throws IOException
+ */
+public static Schema readSchema(String path);
+```
+
+```
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and user can decide whether check all files schema
+ *
+ * @param path             file/folder path
+ * @param validateSchema whether check all files schema
+ * @return schema
+ * @throws IOException
+ */
+public static Schema readSchema(String path, boolean validateSchema);
+```
+
+```
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and will not check all files schema
+ *
+ * @param path file/folder path
+ * @param conf hadoop configuration support, can set s3a AK,SK,end point and other conf with this
+ * @return schema
+ * @throws IOException
+ */
+public static Schema readSchema(String path, Configuration conf);
+```
+
+```
+/**
+ * Read schema from path,
+ * path can be folder path, carbonindex file path, and carbondata file path
+ * and user can decide whether check all files schema
+ *
+ * @param path           file/folder path
+ * @param validateSchema whether check all files schema
+ * @param conf           hadoop configuration support, can set s3a AK,SK,
+ *                       end point and other conf with this
+ * @return schema
+ * @throws IOException
+ */
+public static Schema readSchema(String path, boolean validateSchema, Configuration conf);
+```
+
+```
+/**
+ * This method returns the version details in a formatted string by reading from the carbondata file.
+ * If the application name is SDK_1.0.0 and it has written the carbondata file with carbondata project version 1.6,
+ * then this API returns the String "SDK_1.0.0 in version: 1.6.0-SNAPSHOT"
+ *
+ * @param dataFilePath complete path including carbondata file name
+ * @return string with information of who has written this file in which carbondata project version
+ * @throws IOException
+ */
+public static String getVersionDetails(String dataFilePath);
 ```
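
A small sketch of reusing an existing store's schema for a new writer (illustrative, not from this patch; paths are placeholders):

```java
// Read the schema from an existing folder of carbondata files,
// validating that every file has the same schema
Schema schema = CarbonSchemaReader.readSchema("./testWriteFiles", true).asOriginOrder();

// Reuse that schema to write more files with the same layout
CarbonWriter writer = CarbonWriter.builder()
    .outputPath("./target/sameSchemaOutput")
    .withCsvInput(schema)
    .writtenBy("SDK")
    .build();
```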
 
 ### Class org.apache.carbondata.sdk.file.Schema
 ```
-  /**
-   * construct a schema with fields
-   * @param fields
-   */
-  public Schema(Field[] fields);
+/**
+ * Construct a schema with fields
+ *
+ * @param fields
+ */
+public Schema(Field[] fields);
 ```
 
 ```
-  /**
-   * construct a schema with List<ColumnSchema>
-   *
-   * @param columnSchemaList column schema list
-   */
-  public Schema(List<ColumnSchema> columnSchemaList);
+/**
+ * Construct a schema with List<ColumnSchema>
+ *
+ * @param columnSchemaList column schema list
+ */
+public Schema(List<ColumnSchema> columnSchemaList);
 ```
 
 ```
-  /**
-   * Create a Schema using JSON string, for example:
-   * [
-   *   {"name":"string"},
-   *   {"age":"int"}
-   * ]
-   * @param json specified as string
-   * @return Schema
-   */
-  public static Schema parseJson(String json);
+/**
+ * Create a Schema using JSON string, for example:
+ * [
+ *   {"name":"string"},
+ *   {"age":"int"}
+ * ]
+ * @param json specified as string
+ * @return Schema
+ */
+public static Schema parseJson(String json);
 ```
 
 ```
-  /**
-   * Sort the schema order as original order
-   *
-   * @return Schema object
-   */
-  public Schema asOriginOrder();
+/**
+ * Sort the schema order as original order
+ *
+ * @return Schema object
+ */
+public Schema asOriginOrder();
 ```
 
 ### Class org.apache.carbondata.sdk.file.Field
 ```
-  /**
-   * Field Constructor
-   * @param name name of the field
-   * @param type datatype of field, specified in strings.
-   */
-  public Field(String name, String type);
+/**
+ * Field Constructor
+ *
+ * @param name name of the field
+ * @param type datatype of field, specified in strings.
+ */
+public Field(String name, String type);
 ```
 
 ```
-  /**
-   * Construct Field from ColumnSchema
-   *
-   * @param columnSchema ColumnSchema, Store the information about the column meta data
-   */
-  public Field(ColumnSchema columnSchema);
+/**
+ * Construct Field from ColumnSchema
+ *
+ * @param columnSchema ColumnSchema, Store the information about the column meta data
+ */
+public Field(ColumnSchema columnSchema);
 ```
 
 Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob/master/examples/spark2/src/main/java/org/apache/carbondata/examples/sdk/SDKS3Example.java) in the CarbonData repo.
@@ -920,44 +934,44 @@ Find S3 example code at [SDKS3Example](https://github.com/apache/carbondata/blob
 
 ```
 /**
-* This method will be responsible to get the instance of CarbonProperties class
-*
-* @return carbon properties instance
-*/
+ * This method will be responsible to get the instance of CarbonProperties class
+ *
+ * @return carbon properties instance
+ */
 public static CarbonProperties getInstance();
 ```
 
 ```
 /**
-* This method will be used to add a new property
-*
-* @param key is a property name to set for carbon.
-* @param value is valid parameter corresponding to property.
-* @return CarbonProperties object
-*/
+ * This method will be used to add a new property
+ *
+ * @param key is a property name to set for carbon.
+ * @param value is valid parameter corresponding to property.
+ * @return CarbonProperties object
+ */
 public CarbonProperties addProperty(String key, String value);
 ```
 
 ```
 /**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value.
-* @return properties value for corresponding key. If not set, then returns null.
-*/
+ * This method will be used to get the property value. If property is not
+ * present, then it will return the default value.
+ *
+ * @param key is a property name to get user specified value.
+ * @return properties value for corresponding key. If not set, then returns null.
+ */
 public String getProperty(String key);
 ```
 
 ```
 /**
-* This method will be used to get the property value. If property is not
-* present, then it will return the default value.
-*
-* @param key is a property name to get user specified value..
-* @param defaultValue used to be returned by function if corrosponding key not set.
-* @return properties value for corresponding key. If not set, then returns specified defaultValue.
-*/
+ * This method will be used to get the property value. If property is not
+ * present, then it will return the default value.
+ *
+ * @param key is a property name to get user specified value.
+ * @param defaultValue is returned by the function if the corresponding key is not set.
+ * @return properties value for corresponding key. If not set, then returns specified defaultValue.
+ */
 public String getProperty(String key, String defaultValue);
 ```
 Reference : [list of carbon properties](./configuration-parameters.md)
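
For the CarbonProperties APIs above, a minimal usage sketch (illustrative, not from this patch; "enable.offheap.sort" is the same property used in the CSV writer example earlier):

```java
// Set a carbon property, then read it back with and without a default value
CarbonProperties properties = CarbonProperties.getInstance()
    .addProperty("enable.offheap.sort", "true");

String offheapSort = properties.getProperty("enable.offheap.sort");           // null if never set
String withDefault = properties.getProperty("enable.offheap.sort", "false");  // falls back to "false"
```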