You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ja...@apache.org on 2017/10/30 09:22:13 UTC

[23/35] carbondata git commit: [CARBONDATA-1597] Remove spark1 integration

[CARBONDATA-1597] Remove spark1 integration

As voted by the community, Spark version 1 integration can be removed. This PR removes the following profiles and modules for Spark version 1 integration: 1) spark-1.5 profile, 2) spark-1.6 profile, 3) examples/spark module, 4) integration/spark module

This closes #1421


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/0bf597d9
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/0bf597d9
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/0bf597d9

Branch: refs/heads/streaming_ingest
Commit: 0bf597d9e5268c74eb38751b2ef26cd4d5e80f07
Parents: 311a5b7
Author: Jacky Li <ja...@qq.com>
Authored: Thu Oct 19 16:11:44 2017 +0800
Committer: chenliang613 <ch...@apache.org>
Committed: Thu Oct 19 16:02:49 2017 +0530

----------------------------------------------------------------------
 bin/carbon-spark-sql                            |    5 +-
 .../spark/CARBON_EXAMPLESLogResource.properties |   18 -
 examples/spark/pom.xml                          |   85 --
 .../spark/src/main/resources/complexdata.csv    |  101 --
 examples/spark/src/main/resources/data.csv      |   11 -
 examples/spark/src/main/resources/dimSample.csv |   21 -
 .../spark/src/main/resources/factSample.csv     |   51 -
 .../examples/AllDictionaryExample.scala         |   67 --
 .../carbondata/examples/AlluxioExample.scala    |   65 --
 .../carbondata/examples/CarbonExample.scala     |   61 -
 .../examples/CarbonPartitionExample.scala       |  148 ---
 .../examples/CaseClassDataFrameAPIExample.scala |   49 -
 .../examples/ComplexTypeExample.scala           |   78 --
 .../examples/DataFrameAPIExample.scala          |   51 -
 .../examples/DataManagementExample.scala        |   78 --
 .../examples/DataUpdateDeleteExample.scala      |  185 ---
 .../carbondata/examples/DatasourceExample.scala |   44 -
 .../carbondata/examples/DirectSQLExample.scala  |   47 -
 .../examples/GenerateDictionaryExample.scala    |   96 --
 .../carbondata/examples/HadoopFileExample.scala |   53 -
 .../apache/carbondata/examples/PerfTest.scala   |  328 ------
 .../examples/util/AllDictionaryUtil.scala       |  109 --
 .../carbondata/examples/util/ExampleUtils.scala |   97 --
 examples/spark2/pom.xml                         |   14 -
 integration/hive/pom.xml                        |   20 -
 integration/presto/pom.xml                      |    8 -
 integration/spark-common-cluster-test/pom.xml   |   36 -
 integration/spark-common-test/pom.xml           |   36 -
 integration/spark-common/pom.xml                |   10 -
 ...CARBON_SPARK_INTERFACELogResource.properties |   18 -
 integration/spark/pom.xml                       |  194 ----
 .../readsupport/SparkRowReadSupportImpl.java    |   76 --
 .../spark/CarbonDataFrameWriter.scala           |  202 ----
 .../spark/rdd/CarbonDataRDDFactory.scala        | 1088 ------------------
 .../spark/thriftserver/CarbonThriftServer.scala |   66 --
 .../carbondata/spark/util/CarbonSparkUtil.scala |   45 -
 .../carbondata/spark/util/QueryPlanUtil.scala   |   56 -
 .../org/apache/spark/CarbonInputMetrics.scala   |   66 --
 .../apache/spark/sql/CarbonBoundReference.scala |   46 -
 .../spark/sql/CarbonCatalystOperators.scala     |  166 ---
 .../org/apache/spark/sql/CarbonContext.scala    |  197 ----
 .../sql/CarbonDatasourceHadoopRelation.scala    |  189 ---
 .../spark/sql/CarbonDatasourceRelation.scala    |  321 ------
 .../spark/sql/CarbonDictionaryDecoder.scala     |  259 -----
 .../scala/org/apache/spark/sql/CarbonEnv.scala  |   56 -
 .../org/apache/spark/sql/CarbonSQLConf.scala    |   36 -
 .../scala/org/apache/spark/sql/CarbonScan.scala |  163 ---
 .../org/apache/spark/sql/CarbonSparkUtil.scala  |   46 -
 .../org/apache/spark/sql/CarbonSqlParser.scala  |  589 ----------
 .../apache/spark/sql/CodeGenerateFactory.scala  |  155 ---
 .../sql/CustomDeterministicExpression.scala     |   41 -
 .../spark/sql/SparkUnknownExpression.scala      |  130 ---
 .../sql/execution/command/IUDCommands.scala     |  842 --------------
 .../execution/command/carbonTableSchema.scala   | 1019 ----------------
 .../spark/sql/hive/CarbonAnalysisRules.scala    |  175 ---
 .../spark/sql/hive/CarbonHiveMetadataUtil.scala |   58 -
 .../apache/spark/sql/hive/CarbonMetastore.scala |  562 ---------
 .../spark/sql/hive/CarbonSQLDialect.scala       |   44 -
 .../spark/sql/hive/CarbonStrategies.scala       |  370 ------
 .../apache/spark/sql/hive/HiveQlWrapper.scala   |   32 -
 .../spark/sql/hive/cli/CarbonSQLCLIDriver.scala |   83 --
 .../execution/command/CarbonHiveCommands.scala  |   55 -
 .../spark/sql/optimizer/CarbonFilters.scala     |  431 -------
 .../spark/sql/optimizer/CarbonOptimizer.scala   |  862 --------------
 .../spark/sql/test/SparkTestQueryExecutor.scala |   55 -
 .../org/apache/spark/util/TaskContextUtil.scala |   29 -
 ....apache.spark.sql.sources.DataSourceRegister |   17 -
 ...che.spark.sql.test.TestQueryExecutorRegister |   17 -
 .../src/test/resources/badrecords/test2.csv     |    4 -
 ...plexPrimitiveTimestampDirectDictionary.scala |   65 --
 .../dataload/SparkDatasourceSuite.scala         |  192 ----
 .../TestLoadDataWithSingleQuotechar.scala       |   57 -
 .../allqueries/AllQueriesSpark1TestCase.scala   |   60 -
 .../InsertIntoCarbonTableSpark1TestCase.scala   |   81 --
 .../BadRecordLoggerSharedDictionaryTest.scala   |   84 --
 .../createtable/TestCreateTableSyntax.scala     |  186 ---
 .../CompactionSystemLockFeatureTest.scala       |  143 ---
 .../DataCompactionMinorThresholdTest.scala      |  103 --
 .../DataCompactionNoDictionaryTest.scala        |  173 ---
 .../datacompaction/DataCompactionTest.scala     |  224 ----
 .../GrtLtFilterProcessorTestCase.scala          |   64 --
 .../HadoopFSRelationTestCase.scala              |   68 --
 .../spark/util/AllDictionaryTestCase.scala      |  140 ---
 .../spark/util/DictionaryTestCaseUtil.scala     |   51 -
 .../util/ExternalColumnDictionaryTestCase.scala |  252 ----
 ...GlobalDictionaryUtilConcurrentTestCase.scala |  183 ---
 .../util/GlobalDictionaryUtilTestCase.scala     |  210 ----
 .../apache/spark/sql/TestCarbonSqlParser.scala  |  327 ------
 integration/spark2/pom.xml                      |   10 -
 pom.xml                                         |   69 +-
 processing/pom.xml                              |    6 -
 91 files changed, 5 insertions(+), 13575 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/bin/carbon-spark-sql
----------------------------------------------------------------------
diff --git a/bin/carbon-spark-sql b/bin/carbon-spark-sql
index 2c799d1..4b927d1 100755
--- a/bin/carbon-spark-sql
+++ b/bin/carbon-spark-sql
@@ -33,10 +33,7 @@ fi
 export FWDIR=$SPARK_HOME
 export CARBON_SOURCE="$(cd "`dirname "$0"`"/..; pwd)"
 
-ASSEMBLY_DIR="$CARBON_SOURCE/assembly/target/scala-2.10"
-if [ -d "$CARBON_SOURCE/assembly/target/scala-2.11" ]; then
-  ASSEMBLY_DIR="$CARBON_SOURCE/assembly/target/scala-2.11"
-fi
+ASSEMBLY_DIR="$CARBON_SOURCE/assembly/target/scala-2.11"
 
 GREP_OPTIONS=
 num_jars="$(ls -1 "$ASSEMBLY_DIR" | grep "^carbondata.*hadoop.*\.jar$" | wc -l)"

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/CARBON_EXAMPLESLogResource.properties
----------------------------------------------------------------------
diff --git a/examples/spark/CARBON_EXAMPLESLogResource.properties b/examples/spark/CARBON_EXAMPLESLogResource.properties
deleted file mode 100644
index 78f873e..0000000
--- a/examples/spark/CARBON_EXAMPLESLogResource.properties
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-# limitations under the License.
-#
-carbon.examples = {0}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/pom.xml
----------------------------------------------------------------------
diff --git a/examples/spark/pom.xml b/examples/spark/pom.xml
deleted file mode 100644
index cc078ff..0000000
--- a/examples/spark/pom.xml
+++ /dev/null
@@ -1,85 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-    Licensed to the Apache Software Foundation (ASF) under one or more
-    contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership.
-    The ASF licenses this file to You under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with
-    the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.carbondata</groupId>
-    <artifactId>carbondata-parent</artifactId>
-    <version>1.3.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>carbondata-examples-spark</artifactId>
-  <name>Apache CarbonData :: Spark Examples</name>
-
-  <properties>
-    <dev.path>${basedir}/../../dev</dev.path>
-  </properties>
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark</artifactId>
-      <version>${project.version}</version>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <sourceDirectory>src/main/scala</sourceDirectory>
-    <resources>
-      <resource>
-        <directory>.</directory>
-        <includes>
-          <include>CARBON_EXAMPLESLogResource.properties</include>
-        </includes>
-      </resource>
-    </resources>
-    <plugins>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <version>2.15.2</version>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <phase>compile</phase>
-          </execution>
-          <execution>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
-        </configuration>
-      </plugin>
-    </plugins>
-  </build>
-
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/resources/complexdata.csv
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/resources/complexdata.csv b/examples/spark/src/main/resources/complexdata.csv
deleted file mode 100644
index 23a3949..0000000
--- a/examples/spark/src/main/resources/complexdata.csv
+++ /dev/null
@@ -1,101 +0,0 @@
-deviceInformationId,channelsId,ROMSize,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId,contractNumber
-1,109,4ROM size,29-11-2015,1AA1$2BB1,MAC1$MAC2$MAC3,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,29-11-2015$29-11-2015:29-11-2015,109,2738.562
-10,93,1ROM size,29-11-2015,1AA10$2BB10,MAC4$MAC5$MAC6,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,30-11-2015$30-11-2015:30-11-2015,93,1714.635
-100,2591,2ROM size,29-11-2015,1AA100$2BB100,MAC7$MAC8$MAC9,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,01-12-2015$01-12-2015:01-12-2015,2591,1271
-1000,2531,2ROM size,29-11-2015,1AA1000$2BB1000,MAC10$$MAC12,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,02-12-2015$02-12-2015:02-12-2015,2531,692
-10000,2408,0ROM size,29-11-2015,1AA10000$2BB10000,MAC13$$MAC15,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,03-12-2015$03-12-2015:03-12-2015,2408,2175
-100000,1815,0ROM size,29-11-2015,1AA100000$2BB100000,MAC16$$MAC18,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,04-12-2015$04-12-2015:04-12-2015,1815,136
-1000000,2479,4ROM size,29-11-2015,1AA1000000$2BB1000000,MAC19$$MAC21,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,05-12-2015$05-12-2015:05-12-2015,2479,1600
-100001,1845,7ROM size,29-11-2015,1AA100001$,MAC22$$MAC24,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,06-12-2015$06-12-2015:06-12-2015,1845,505
-100002,2008,1ROM size,29-11-2015,1AA100002$,MAC25$$MAC27,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,07-12-2015$07-12-2015:07-12-2015,2008,1341
-100003,1121,5ROM size,29-11-2015,1AA100003$,MAC28$$MAC30,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,08-12-2015$08-12-2015:08-12-2015,1121,2239
-100004,1511,8ROM size,29-11-2015,1AA100004$,MAC31$$MAC33,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,09-12-2015$09-12-2015:09-12-2015,1511,2970
-100005,2759,0ROM size,29-11-2015,1AA100005$,MAC34$$MAC36,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,10-12-2015$10-12-2015:10-12-2015,2759,2593
-100006,2069,7ROM size,29-11-2015,1AA100006$,MAC37$$MAC39,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,11-12-2015$11-12-2015:11-12-2015,2069,2572
-100007,396,7ROM size,29-11-2015,1AA100007$,MAC40$$MAC42,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,12-12-2015$12-12-2015:12-12-2015,396,1991
-100008,104,2ROM size,29-11-2015,1AA100008$,MAC43$$MAC45,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,13-12-2015$13-12-2015:13-12-2015,104,1442
-100009,477,3ROM size,29-11-2015,1AA100009$,MAC46$$MAC48,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,14-12-2015$14-12-2015:14-12-2015,477,1841
-10001,546,8ROM size,29-11-2015,1AA10001$2,MAC49$$MAC51,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,15-12-2015$15-12-2015:15-12-2015,546,298
-100010,2696,3ROM size,29-11-2015,1AA100010$2BB100010,MAC52$$MAC54,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,16-12-2015$16-12-2015:16-12-2015,2696,79
-100011,466,2ROM size,29-11-2015,1AA100011$2BB100011,MAC55$$MAC57,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,17-12-2015$17-12-2015:17-12-2015,466,202
-100012,2644,2ROM size,29-11-2015,1AA100012$2BB100012,MAC58$$MAC60,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,18-12-2015$18-12-2015:18-12-2015,2644,568
-100013,2167,3ROM size,29-11-2015,1AA100013$2BB100013,MAC61$MAC62,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,19-12-2015$19-12-2015:19-12-2015,2167,355
-100014,1069,7ROM size,29-11-2015,1AA100014$2BB100014,MAC64$MAC65,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,20-12-2015$20-12-2015:20-12-2015,1069,151
-100015,1447,9ROM size,29-11-2015,1AA100015$2BB100015,MAC67$MAC68,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,21-12-2015$21-12-2015:21-12-2015,1447,2863
-100016,2963,3ROM size,29-11-2015,1AA100016$2BB100016,MAC70$MAC71,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,22-12-2015$22-12-2015:22-12-2015,2963,1873
-100017,1580,5ROM size,29-11-2015,1AA100017$2BB100017,MAC73$MAC74,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,23-12-2015$23-12-2015:23-12-2015,1580,2205
-100018,446,2ROM size,29-11-2015,1AA100018$2BB100018,MAC76$MAC77,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,24-12-2015$24-12-2015:24-12-2015,446,441
-100019,2151,7ROM size,29-11-2015,1AA100019$2BB100019,MAC79$MAC80,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,25-12-2015$25-12-2015:25-12-2015,2151,2194
-10002,2201,1ROM size,29-11-2015,2BB10002,MAC82$MAC83,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,26-12-2015$26-12-2015:26-12-2015,2201,2972
-100020,2574,5ROM size,29-11-2015,$2BB100020,MAC85$MAC86,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,27-12-2015$27-12-2015:27-12-2015,2574,256
-100021,1734,4ROM size,29-11-2015,$2BB100021,MAC88$MAC89,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,28-12-2015$28-12-2015:28-12-2015,1734,1778
-100022,155,3ROM size,29-11-2015,$2BB100022,MAC91$MAC92,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,29-12-2015$29-12-2015:29-12-2015,155,1999
-100023,1386,8ROM size,29-11-2015,$2BB100023,MAC94$MAC95,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,30-12-2015$30-12-2015:30-12-2015,1386,2194
-100024,1017,9ROM size,29-11-2015,$2BB100024,MAC97$MAC98,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,,1017,2483
-100025,47,2ROM size,29-11-2015,$2BB100025,$MAC101$MAC102,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,,47,1724
-100026,2930,7ROM size,29-11-2015,$2BB100026,$MAC104$MAC105,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,,2930,1768
-100027,2940,0ROM size,29-11-2015,$2BB100027,$MAC107$MAC108,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,,2940,2436
-100028,297,5ROM size,29-11-2015,$2BB100028,$MAC110$MAC111,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,,297,2849
-100029,1695,2ROM size,29-11-2015,$2BB100029,$MAC113$MAC114,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,,1695,1691
-10003,1326,7ROM size,29-11-2015,2BB10003,$MAC116$MAC117,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,,1326,2071
-100030,513,7ROM size,29-11-2015,$2BB100030,$MAC119$MAC120,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,07-01-2016$07-01-2016:,513,1333
-100031,1741,1ROM size,29-11-2015,$2BB100031,$MAC122$MAC123,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,08-01-2016$08-01-2016:,1741,1080
-100032,1198,0ROM size,29-11-2015,$2BB100032,$MAC125$MAC126,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,09-01-2016$09-01-2016:,1198,1053
-100033,273,9ROM size,29-11-2015,$2BB100033,$MAC128$MAC129,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,10-01-2016$10-01-2016:,273,760
-100034,1234,6ROM size,29-11-2015,$2BB100034,$MAC131$MAC132,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,11-01-2016$11-01-2016:,1234,2061
-100035,1619,1ROM size,29-11-2015,$2BB100035,$MAC134$MAC135,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,12-01-2016$12-01-2016:,1619,2142
-100036,2415,2ROM size,29-11-2015,$2BB100036,$MAC137$MAC138,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,13-01-2016$13-01-2016:,2415,2224
-100037,2381,2ROM size,29-11-2015,$2BB100037,$MAC140$MAC141,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,14-01-2016$14-01-2016:,2381,1015
-100038,872,7ROM size,29-11-2015,1AA100038$2BB100038,$MAC143$MAC144,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,15-01-2016$15-01-2016,872,1229
-100039,1835,9ROM size,29-11-2015,1AA100039$2BB100039,$$MAC147,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,16-01-2016$16-01-2016,1835,1750
-10004,2597,1ROM size,29-11-2015,1AA10004$2BB10004,$$MAC150,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,17-01-2016$17-01-2016,2597,1717
-100040,1969,9ROM size,29-11-2015,1AA100040$2BB100040,$$MAC153,,18-01-2016$18-01-2016,1969,2078
-100041,2133,8ROM size,29-11-2015,$,$$MAC156,,19-01-2016$19-01-2016,2133,2734
-100042,631,9ROM size,29-11-2015,$,$$MAC159,,20-01-2016$20-01-2016,631,2745
-100043,187,4ROM size,29-11-2015,$,$$MAC162,2:Chinese::guangzhou:longhua:mingzhi$2:India::guangzhou:longhua:mingzhi,21-01-2016$21-01-2016,187,571
-100044,1232,5ROM size,29-11-2015,$,$$MAC165,2::Guangdong Province:guangzhou:longhua:mingzhi$2::Guangdong Province:guangzhou:longhua:mingzhi,22-01-2016$22-01-2016,1232,1697
-100045,1602,6ROM size,29-11-2015,$,$$MAC168,4:Chinese:Hunan Province::xiangtan:jianshelu$4:India:Hunan Province::xiangtan:jianshelu,23-01-2016$23-01-2016,1602,2553
-100046,2319,9ROM size,29-11-2015,$,$$MAC171,2:Chinese:Guangdong Province:guangzhou::mingzhi$2:India:Guangdong Province:guangzhou::mingzhi,24-01-2016$24-01-2016,2319,1077
-100047,839,4ROM size,29-11-2015,$,$$MAC174,5:Chinese:Hunan Province:zhuzhou:tianyuan:$5:India:Hunan Province:zhuzhou:tianyuan:,25-01-2016$25-01-2016,839,1823
-100048,1184,2ROM size,29-11-2015,$,$$MAC177,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,26-01-2016$:,1184,2399
-100049,2705,2ROM size,29-11-2015,$,$$MAC180,2:Chinese:Guangdong Province$2:India:Guangdong Province,27-01-2016$:,2705,2890
-10005,1185,1ROM size,29-11-2015,,$$MAC183,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,28-01-2016$:,1185,1608
-100050,2457,9ROM size,29-11-2015,,$$MAC186,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,29-01-2016$:,2457,29
-100051,2320,8ROM size,29-11-2015,,$$MAC189,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,30-01-2016$:,2320,1407
-100052,2300,0ROM size,29-11-2015,,$$,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,31-01-2016$:,2300,845
-100053,1210,4ROM size,29-11-2015,,$$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,01-02-2016$:,1210,1655
-100054,1689,8ROM size,29-11-2015,,$$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,02-02-2016$:,1689,1368
-100055,2823,2ROM size,29-11-2015,,$$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,$03-02-2016:03-02-2016,2823,1728
-100056,68,6ROM size,29-11-2015,,$$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,$04-02-2016:04-02-2016,68,750
-100057,716,0ROM size,29-11-2015,,$$,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,$05-02-2016:05-02-2016,716,2288
-100058,864,6ROM size,29-11-2015,,$$,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$06-02-2016:06-02-2016,864,2635
-100059,499,6ROM size,29-11-2015,,$$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,$07-02-2016:07-02-2016,499,1337
-10006,1429,3ROM size,29-11-2015,,$$,:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$:India:Guangdong Province:guangzhou:longhua:mingzhi,$08-02-2016:08-02-2016,1429,2478
-100060,2176,2ROM size,29-11-2015,,$$,:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$:India:Hunan Province:xiangtan:xiangtan:jianshelu,$09-02-2016:09-02-2016,2176,538
-100061,2563,7ROM size,29-11-2015,,,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,$10-02-2016:10-02-2016,2563,1407
-100062,2594,3ROM size,29-11-2015,,,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,$11-02-2016:11-02-2016,2594,2952
-100063,2142,1ROM size,29-11-2015,,,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,$12-02-2016:12-02-2016,2142,1226
-100064,138,0ROM size,29-11-2015,1AA100064$2BB100064,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,$13-02-2016:13-02-2016,138,865
-100065,1168,6ROM size,29-11-2015,1AA100065$2BB100065,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,$14-02-2016:14-02-2016,1168,901
-100066,2828,5ROM size,29-11-2015,1AA100066$2BB100066,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,$:,2828,1864
-100067,1160,0ROM size,29-11-2015,1AA100067$2BB100067,,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,$:,1160,572
-100068,1890,6ROM size,29-11-2015,1AA100068$2BB100068,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$:,1890,412
-100069,1195,4ROM size,29-11-2015,1AA100069$2BB100069,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,$:,1195,1491
-10007,2797,9ROM size,29-11-2015,1AA10007$2BB10007,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,$:,2797,1350
-100070,44,5ROM size,29-11-2015,1AA100070$2BB100070,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$:,44,1567
-100071,1683,6ROM size,29-11-2015,1AA100071$2BB100071,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,$:,1683,1973
-100072,1085,1ROM size,29-11-2015,1AA100072$2BB100072,,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan Province:changsha:yuhua:shazitang,22-02-2016$22-02-2016:22-02-2016,1085,448
-100073,776,7ROM size,29-11-2015,1AA100073$2BB100073,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,23-02-2016$23-02-2016:23-02-2016,776,2488
-100074,2074,9ROM size,29-11-2015,1AA100074$2BB100074,MAC262$MAC263$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,24-02-2016$24-02-2016:24-02-2016,2074,907
-100075,1062,2ROM size,29-11-2015,1AA100075$2BB100075,MAC265$MAC266$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,25-02-2016$25-02-2016:25-02-2016,1062,2507
-100076,987,7ROM size,29-11-2015,1AA100076$2BB100076,MAC268$MAC269$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New Delhi:wuhan:hongshan:hongshan,26-02-2016$26-02-2016:26-02-2016,987,732
-100077,2799,9ROM size,29-11-2015,1AA100077$2BB100077,MAC271$MAC272$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,27-02-2016$27-02-2016:27-02-2016,2799,2077
-100078,2765,1ROM size,29-11-2015,1AA100078$2BB100078,MAC274$MAC275$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New Delhi:delhi:delhi:delhi,28-02-2016$28-02-2016:28-02-2016,2765,1434
-100079,2164,1ROM size,29-11-2015,1AA100079$2BB100079,MAC277$MAC278$,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,29-02-2016$29-02-2016:29-02-2016,2164,1098
-10008,1624,6ROM size,29-11-2015,1AA10008$2BB10008,MAC280$MAC281$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,01-03-2016$01-03-2016:01-03-2016,1624,813
-100080,2355,1ROM size,29-11-2015,1AA100080$2BB100080,MAC283$MAC284$MAC285,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,02-03-2016$02-03-2016:02-03-2016,2355,954
-100081,1650,6ROM size,29-11-2015,1AA100081$2BB100081,MAC286$MAC287$MAC288,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,03-03-2016$03-03-2016:03-03-2016,1650,613
-100082,2761,3ROM size,29-11-2015,1AA100082$2BB100082,MAC289$MAC290$MAC291,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,04-03-2016$04-03-2016:04-03-2016,2761,2348
-100083,1856,3ROM size,29-11-2015,1AA100083$2BB100083,MAC292$MAC293$MAC294,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,05-03-2016$05-03-2016:05-03-2016,1856,2192
-100084,1841,7ROM size,29-11-2015,1AA100084$2BB100084,MAC295$MAC296$MAC297,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,06-03-2016$06-03-2016:06-03-2016,1841,2826
-100085,1841,7ROM size,29-11-2015,1AA100084$2BB100084,MAC295$MAC296$MAC297,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,06-03-2016$06-03-2016:06-03-2016,1841,2826

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/resources/data.csv
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/resources/data.csv b/examples/spark/src/main/resources/data.csv
deleted file mode 100644
index 5696978..0000000
--- a/examples/spark/src/main/resources/data.csv
+++ /dev/null
@@ -1,11 +0,0 @@
-ID,date,country,name,phonetype,serialname,salary,floatField
-1,2015/7/23,china,aaa1,phone197,ASD69643,15000,2.34
-2,2015/7/24,china,aaa2,phone756,ASD42892,15001,2.34
-3,2015/7/25,china,aaa3,phone1904,ASD37014,15002,2.34
-4,2015/7/26,china,aaa4,phone2435,ASD66902,15003,2.34
-5,2015/7/27,china,aaa5,phone2441,ASD90633,15004,2.34
-6,2015/7/28,china,aaa6,phone294,ASD59961,15005,3.5
-7,2015/7/29,china,aaa7,phone610,ASD14875,15006,2.34
-8,2015/7/30,china,aaa8,phone1848,ASD57308,15007,2.34
-9,2015/7/18,china,aaa9,phone706,ASD86717,15008,2.34
-10,2015/7/19,usa,aaa10,phone685,ASD30505,15009,2.34
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/resources/dimSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/resources/dimSample.csv b/examples/spark/src/main/resources/dimSample.csv
deleted file mode 100644
index 0c8f27a..0000000
--- a/examples/spark/src/main/resources/dimSample.csv
+++ /dev/null
@@ -1,21 +0,0 @@
-id,name,city
-1,David,Beijing
-2,Mark,Paris
-3,Bill,NewYork
-4,Sara,Tokyo
-5,John,Beijing
-6,Michel,Chicago
-7,Robert,Houston
-8,Sunny,Boston
-9,Mary,Tokyo
-10,Edward,Paris
-11,James,Washington
-12,Maria,Berlin
-13,Adam,Athens
-14,Peter,Boston
-15,George,Paris
-16,Paul,Shanghai
-17,Lisa,Hangzhou
-18,Angel,Beijing
-19,Emily,Bangalore
-20,Kevin,Singapore
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/resources/factSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/resources/factSample.csv b/examples/spark/src/main/resources/factSample.csv
deleted file mode 100644
index 9693156..0000000
--- a/examples/spark/src/main/resources/factSample.csv
+++ /dev/null
@@ -1,51 +0,0 @@
-id,name,city,salary
-1,David,Beijing,15000
-1,David,Tokyo,20000
-1,David,Hangzhou,18000
-2,Mark,Paris,12000
-2,Mark,Boston,15000
-2,Mark,Chicago,18000
-3,Bill,NewYork,20000
-3,Bill,Boston,23000
-4,Sara,Tokyo,11000
-4,Sara,Paris,15000
-4,Sara,Chicago,21000
-4,Sara,Hangzhou,17000
-5,John,Beijing,15000
-5,John,Shanghai,16000
-6,Michel,Chicago,11000
-6,Michel,Boston,12000
-6,Michel,Tokyo,11000
-8,Sunny,Boston,14000
-8,Sunny,Beijing,22000
-8,Sunny,Tokyo,20000
-9,Mary,Tokyo,13000
-9,Mary,NewYork,18000
-9,Mary,Paris,16000
-9,Mary,Washington,20000
-9,Mary,Boston,17000
-10,Edward,Paris,20000
-10,Edward,Beijing,12000
-10,Edward,Berlin,15000
-11,James,Washington,16000
-12,Maria,Berlin,15000
-12,Maria,Beijing,16000
-13,Adam,Athens,21000
-13,Adam,Berlin,18000
-13,Adam,Hangzhou,17000
-14,Peter,Boston,20000
-14,Peter,Berlin,21000
-14,Peter,Shanghai,18000
-15,George,Paris,17000
-15,George,Tokyo,12000
-15,George,Beijing,15000
-15,George,Berlin,18000
-16,Paul,Shanghai,22000
-16,Paul,Tokyo,19000
-16,Paul,Paris,24000
-16,Paul,Hangzhou,22000
-18,Angel,Beijing,22000
-18,Angel,NewYork,25000
-18,Angel,Tokyo,22000
-20,Kevin,Singapore,18000
-20,Kevin,Bangalore,16000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
deleted file mode 100644
index 9012dcf..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.examples.util.{AllDictionaryUtil, ExampleUtils}
-
-object AllDictionaryExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("AllDictionaryExample")
-    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
-    val csvHeader = "ID,date,country,name,phonetype,serialname,salary"
-    val dictCol = "|date|country|name|phonetype|serialname|"
-    val allDictFile = ExampleUtils.currentPath + "/src/main/resources/data.dictionary"
-    // extract all dictionary files from source data
-    AllDictionaryUtil.extractDictionary(cc.sparkContext,
-      testData, allDictFile, csvHeader, dictCol)
-    // Specify date format based on raw data
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-
-    cc.sql("""
-           CREATE TABLE IF NOT EXISTS t3
-           (ID Int, date Date, country String,
-           name String, phonetype String, serialname String, salary Int,floatField float)
-           STORED BY 'carbondata'
-           """)
-
-    cc.sql(s"""
-           LOAD DATA LOCAL INPATH '$testData' into table t3
-           options('ALL_DICTIONARY_PATH'='$allDictFile')
-           """)
-
-    cc.sql("""
-           SELECT * FROM t3
-           """).show()
-
-    cc.sql("""
-           SELECT * FROM t3 where floatField=3.5
-           """).show()
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-
-    // clean local dictionary files
-    AllDictionaryUtil.cleanDictionary(allDictFile)
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
deleted file mode 100644
index 12901b5..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.datastore.impl.FileFactory
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.examples.util.ExampleUtils
-
-/**
- * configure alluxio:
- * 1.start alluxio
- * 2.upload the jar :"/alluxio_path/core/client/target/
- * alluxio-core-client-YOUR-VERSION-jar-with-dependencies.jar"
- * 3.Get more detail at:http://www.alluxio.org/docs/master/en/Running-Spark-on-Alluxio.html
- */
-
-object AlluxioExample {
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("AlluxioExample")
-    cc.sparkContext.hadoopConfiguration.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
-    FileFactory.getConfiguration.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
-
-    // Specify date format based on raw data
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-
-    cc.sql("""
-           CREATE TABLE IF NOT EXISTS t3
-           (ID Int, date Date, country String,
-           name String, phonetype String, serialname String, salary Int)
-           STORED BY 'carbondata'
-           """)
-
-    cc.sql(s"""
-           LOAD DATA LOCAL INPATH 'alluxio://localhost:19998/data.csv' into table t3
-           """)
-
-    cc.sql("""
-           SELECT country, count(salary) AS amount
-           FROM t3
-           WHERE country IN ('china','france')
-           GROUP BY country
-           """).show()
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
deleted file mode 100644
index 36013a8..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonExample.scala
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.examples.util.ExampleUtils
-
-object CarbonExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("CarbonExample")
-    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
-
-    // Specify date format based on raw data
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-
-    // Create table, 6 dimensions, 1 measure
-    cc.sql("""
-           CREATE TABLE IF NOT EXISTS t3
-           (ID Int, date Date, country String,
-           name String, phonetype String, serialname char(10), salary Int)
-           STORED BY 'carbondata'
-           """)
-
-    // Load data
-    cc.sql(s"""
-           LOAD DATA LOCAL INPATH '$testData' into table t3
-           """)
-
-    // Perform a query
-    cc.sql("""
-           SELECT country, count(salary) AS amount
-           FROM t3
-           WHERE country IN ('china','france')
-           GROUP BY country
-           """).show()
-
-    // Drop table
-    cc.sql("DROP TABLE IF EXISTS t3")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonPartitionExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonPartitionExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonPartitionExample.scala
deleted file mode 100644
index 9ceadea..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/CarbonPartitionExample.scala
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import scala.collection.mutable.LinkedHashMap
-
-import org.apache.spark.sql.AnalysisException
-
-import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.examples.util.ExampleUtils
-
-object CarbonPartitionExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("CarbonPartitionExample")
-    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
-    // none partition table
-    cc.sql("DROP TABLE IF EXISTS t0")
-    cc.sql("""
-                | CREATE TABLE IF NOT EXISTS t0
-                | (
-                | vin String,
-                | logdate Timestamp,
-                | phonenumber Int,
-                | country String,
-                | area String
-                | )
-                | STORED BY 'carbondata'
-              """.stripMargin)
-
-    // range partition
-    cc.sql("DROP TABLE IF EXISTS t1")
-    cc.sql("""
-                | CREATE TABLE IF NOT EXISTS t1(
-                | vin STRING,
-                | phonenumber INT,
-                | country STRING,
-                | area STRING
-                | )
-                | PARTITIONED BY (logdate TIMESTAMP)
-                | STORED BY 'carbondata'
-                | TBLPROPERTIES('PARTITION_TYPE'='RANGE',
-                | 'RANGE_INFO'='2014/01/01,2015/01/01,2016/01/01')
-              """.stripMargin)
-
-    // hash partition
-    cc.sql("""
-                | CREATE TABLE IF NOT EXISTS t3(
-                | logdate Timestamp,
-                | phonenumber Int,
-                | country String,
-                | area String
-                | )
-                | PARTITIONED BY (vin String)
-                | STORED BY 'carbondata'
-                | TBLPROPERTIES('PARTITION_TYPE'='HASH','NUM_PARTITIONS'='5')
-                """.stripMargin)
-
-    // list partition
-    cc.sql("DROP TABLE IF EXISTS t5")
-    cc.sql("""
-               | CREATE TABLE IF NOT EXISTS t5(
-               | vin String,
-               | logdate Timestamp,
-               | phonenumber Int,
-               | area String
-               | )
-               | PARTITIONED BY (country string)
-               | STORED BY 'carbondata'
-               | TBLPROPERTIES('PARTITION_TYPE'='LIST',
-               | 'LIST_INFO'='(China,United States),UK ,japan,(Canada,Russia), South Korea ')
-       """.stripMargin)
-
-    cc.sql(s"DROP TABLE IF EXISTS partitionDB.t9")
-    cc.sql(s"DROP DATABASE IF EXISTS partitionDB")
-    cc.sql(s"CREATE DATABASE partitionDB")
-    cc.sql(s"""
-                | CREATE TABLE IF NOT EXISTS partitionDB.t9(
-                | logdate Timestamp,
-                | phonenumber Int,
-                | country String,
-                | area String
-                | )
-                | PARTITIONED BY (vin String)
-                | STORED BY 'carbondata'
-                | TBLPROPERTIES('PARTITION_TYPE'='HASH','NUM_PARTITIONS'='5')
-                """.stripMargin)
-    // hive partition table
-    cc.sql("DROP TABLE IF EXISTS t7")
-    cc.sql("""
-       | create table t7(id int, name string) partitioned by (city string)
-       | row format delimited fields terminated by ','
-       """.stripMargin)
-    cc.sql("alter table t7 add partition (city = 'Hangzhou')")
-    // hive partition table
-    cc.sql(s"DROP TABLE IF EXISTS hiveDB.t7")
-    cc.sql(s"CREATE DATABASE IF NOT EXISTS hiveDB")
-    cc.sql("""
-       | create table hiveDB.t7(id int, name string) partitioned by (city string)
-       | row format delimited fields terminated by ','
-       """.stripMargin)
-    cc.sql("alter table hiveDB.t7 add partition (city = 'Shanghai')")
-    //  show partitions
-    try {
-      cc.sql("SHOW PARTITIONS t0").show(100, false)
-    } catch {
-      case ex: AnalysisException => LOGGER.error(ex.getMessage())
-    }
-    cc.sql("SHOW PARTITIONS t1").show(100, false)
-    cc.sql("SHOW PARTITIONS t3").show(100, false)
-    cc.sql("SHOW PARTITIONS t5").show(100, false)
-    cc.sql("SHOW PARTITIONS t7").show(100, false)
-    cc.sql("use hiveDB").show()
-    cc.sql("SHOW PARTITIONS t7").show(100, false)
-    cc.sql("use default").show()
-    cc.sql("SHOW PARTITIONS partitionDB.t9").show(100, false)
-
-    cc.sql("DROP TABLE IF EXISTS t0")
-    cc.sql("DROP TABLE IF EXISTS t1")
-    cc.sql("DROP TABLE IF EXISTS t3")
-    cc.sql("DROP TABLE IF EXISTS t5")
-    cc.sql("DROP TABLE IF EXISTS t7")
-    cc.sql(s"DROP TABLE IF EXISTS hiveDb.t7")
-    cc.sql(s"DROP TABLE IF EXISTS partitionDB.t9")
-    cc.sql(s"DROP DATABASE IF EXISTS partitionDB")
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
deleted file mode 100644
index 21bd002..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
+++ /dev/null
@@ -1,49 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{DataFrame, SaveMode}
-
-import org.apache.carbondata.examples.util.ExampleUtils
-
-case class People(name: String, occupation: String, id: Int)
-
-object CaseClassDataFrameAPIExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("CaseClassDataFrameAPIExample")
-    import cc.implicits._
-
-    val people = List(People("sangeeta", "engineer", 1), People("pallavi", "consultant", 2))
-    val peopleRDD: RDD[People] = cc.sc.parallelize(people)
-    val peopleDF: DataFrame = peopleRDD.toDF("name", "occupation", "id")
-
-    // writing data to carbon table
-    peopleDF.write
-      .format("carbondata")
-      .option("tableName", "carbon2")
-      .option("compress", "true")
-      .mode(SaveMode.Overwrite)
-      .save()
-
-    cc.sql("SELECT * FROM carbon2").show()
-
-    cc.sql("DROP TABLE IF EXISTS carbon2")
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala
deleted file mode 100644
index 992c3f9..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/ComplexTypeExample.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.carbondata.examples.util.ExampleUtils
-
-/**
- * Carbon supports the complex types ARRAY and STRUCT.
- * The complex type columns can be used with all SQL clauses.
- */
-object ComplexTypeExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("ComplexTypeExample")
-    val dataPath = ExampleUtils.currentPath + "/src/main/resources/complexdata.csv"
-    val tableName = "complexTypeTable"
-
-    cc.sql(s"DROP TABLE IF EXISTS $tableName")
-    cc.sql(s"""CREATE TABLE $tableName (
-                 deviceInformationId int,
-                 channelsId string,
-                 ROMSize string,
-                 purchasedate string,
-                 mobile struct<imei:string,
-                              imsi:string>,
-                 MAC array<string>,
-                 locationinfo array<struct<ActiveAreaId:int,
-                                           ActiveCountry:string,
-                                           ActiveProvince:string,
-                                           Activecity:string,
-                                           ActiveDistrict:string,
-                                           ActiveStreet:string>>,
-                  proddate struct<productionDate: string,
-                                 activeDeactivedate: array<string>>,
-                  gamePointId double,
-                  contractNumber double)
-              STORED BY 'org.apache.carbondata.format' """)
-
-    cc.sql(s"load data local inpath '$dataPath' into table $tableName " +
-      "options ('COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')")
-
-    // filter on complex ARRAY type with index filter
-    cc.sql(s"SELECT mobile, proddate.activeDeactivedate, MAC[0] FROM $tableName " +
-      "WHERE MAC[0] LIKE 'MAC1%'").show
-
-    // filter on complex STRUCT type
-    cc.sql(s"SELECT mobile, proddate.activeDeactivedate FROM $tableName " +
-      "WHERE mobile.imei = '1AA1' or mobile.imsi = ''").show
-
-    // filter on complex STRUCT<ARRAY>
-    cc.sql(s"SELECT mobile, proddate.activeDeactivedate[0] FROM $tableName " +
-      "WHERE proddate.activeDeactivedate[0] = '29-11-2015'").show
-
-    // filter on complex ARRAY<STRUCT>
-    cc.sql(s"SELECT mobile, locationinfo[0] FROM $tableName " +
-      "WHERE locationinfo[0].ActiveCountry = 'Chinese'").show
-
-    // complex type aggregation and group by complex type
-    cc.sql(s"SELECT mobile, count(proddate) FROM $tableName GROUP BY mobile").show
-
-    cc.sql(s"DROP TABLE IF EXISTS $tableName")
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
deleted file mode 100644
index db5def9..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.carbondata.examples.util.ExampleUtils
-
-// scalastyle:off println
-object DataFrameAPIExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("DataFrameAPIExample")
-    ExampleUtils.writeSampleCarbonFile(cc, "carbon1", 1000)
-
-    // use datasource api to read
-    val in = cc.read
-      .format("carbondata")
-      .option("tableName", "carbon1")
-      .load()
-
-    import cc.implicits._
-    var count = in.where($"c3" > 500).select($"*").count()
-    println(s"count after 1 load: $count")
-
-    // append new data, query answer should be 1000
-    ExampleUtils.appendSampleCarbonFile(cc, "carbon1")
-    count = in.where($"c3" > 500).select($"*").count()
-    println(s"count after 2 load: $count")
-
-    // use SQL to read
-    cc.sql("SELECT c1, count(c3) FROM carbon1 where c3 > 500 group by c1 limit 10").show
-
-    // delete carbondata file
-    ExampleUtils.cleanSampleCarbonFile(cc, "carbon1")
-  }
-}
-// scalastyle:on println

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
deleted file mode 100644
index 551a008..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.carbondata.examples.util.ExampleUtils
-
-object DataManagementExample {
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("DataManagementExample")
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-
-    // create a table using CarbonData
-    cc.sql(
-      """
-           CREATE TABLE IF NOT EXISTS t3
-           (ID Int, date Timestamp, country String,
-           name String, phonetype String, serialname String, salary Int)
-           STORED BY 'carbondata'
-      """
-    )
-
-    // data.csv has 1000 lines
-    val testData = ExampleUtils.currentPath + "/src/main/resources/data.csv"
-
-    // load data 5 times, each load of data is called a segment in CarbonData
-    (1 to 5).map { i =>
-      cc.sql(s"LOAD DATA LOCAL INPATH '$testData' into table t3")
-    }
-    cc.sql("SHOW SEGMENTS FOR TABLE t3 ").show
-
-    // delete the first segment
-    cc.sql("DELETE FROM TABLE T3 WHERE SEGMENT.ID IN (0)")
-    cc.sql("SHOW SEGMENTS FOR TABLE t3 LIMIT 10").show
-
-    // this query will be executed on last 4 segments, it should return 4000 rows
-    cc.sql("SELECT count(*) AS amount FROM t3").show
-
-    // force a major compaction to compact all segments into one
-    cc.sql("ALTER TABLE t3 COMPACT 'MAJOR' ")
-    cc.sql("SHOW SEGMENTS FOR TABLE t3 LIMIT 10").show
-
-    // load again, add another 1000 rows
-    cc.sql(s"LOAD DATA LOCAL INPATH '$testData' into table t3")
-    cc.sql("SHOW SEGMENTS FOR TABLE t3 LIMIT 10").show
-
-    // this query will be executed on 2 segments, it should return 5000 rows
-    cc.sql("SELECT count(*) AS amount FROM t3").show
-
-    // delete all segments whose loading time is before '2099-01-01 01:00:00'
-    cc.sql("DELETE FROM TABLE T3 WHERE SEGMENT.STARTTIME BEFORE '2099-01-01 01:00:00'")
-    cc.sql("SHOW SEGMENTS FOR TABLE t3 ").show
-
-    // this query will be executed on 0 segments, it should return 0 rows
-    cc.sql("SELECT count(*) AS amount FROM t3").show
-
-    // force clean up all 'MARKED_FOR_DELETE' and 'COMPACTED' segments immediately
-    cc.sql("CLEAN FILES FOR TABLE t3")
-    cc.sql("SHOW SEGMENTS FOR TABLE t3").show
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
deleted file mode 100644
index 830a819..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
+++ /dev/null
@@ -1,185 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import java.io.File
-import java.text.SimpleDateFormat
-
-import org.apache.spark.{SparkConf, SparkContext}
-import org.apache.spark.sql.{CarbonContext, DataFrame, Row, SaveMode, SQLContext}
-import org.apache.spark.sql.types.{DataTypes, StructField, StructType}
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.examples.util.ExampleUtils
-
-object DataUpdateDeleteExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("DataUpdateDeleteExample")
-
-    // for local files
-    var rootPath = ExampleUtils.currentPath
-    // for hdfs files
-    // var rootPath = "hdfs://hdfs-host/carbon"
-
-    val testData = rootPath + "/src/main/resources/data.csv"
-
-    // Specify date format based on raw data
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
-
-    cc.sql("DROP TABLE IF EXISTS t3")
-    cc.sql("DROP TABLE IF EXISTS t5")
-
-    // Create table, 6 dimensions, 1 measure
-    cc.sql("""
-           CREATE TABLE IF NOT EXISTS t3
-           (id Int, date Date, country String,
-           name String, phonetype String, serialname char(10), salary Int)
-           STORED BY 'carbondata'
-           """)
-
-    cc.sql(s"""
-           LOAD DATA LOCAL INPATH '$testData' INTO TABLE t3
-           """)
-
-    // Specify date format based on raw data
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy-MM-dd")
-
-    // Simulate data and write to table t5
-    var fields = Seq[StructField]()
-    fields = fields :+ DataTypes.createStructField("t5_id", DataTypes.IntegerType, false)
-    fields = fields :+ DataTypes.createStructField("t5_date", DataTypes.DateType, false)
-    fields = fields :+ DataTypes.createStructField("t5_country", DataTypes.StringType, false)
-    fields = fields :+ DataTypes.createStructField("t5_name", DataTypes.StringType, false)
-    fields = fields :+ DataTypes.createStructField("t5_phonetype", DataTypes.StringType, false)
-    fields = fields :+ DataTypes.createStructField("t5_serialname", DataTypes.StringType, false)
-    fields = fields :+ DataTypes.createStructField("t5_salary", DataTypes.IntegerType, false)
-    var schema = StructType(fields)
-    var sdf = new SimpleDateFormat("yyyy-MM-dd")
-    var data = cc.sparkContext.parallelize(1 to 10).map { x =>
-      val day = x % 20 + 1
-      var dateStr = ""
-      if (day >= 10) {
-        dateStr = "2017-07-" + day
-      } else {
-        dateStr = "2017-07-0" + day
-      }
-      val dt = new java.sql.Date(sdf.parse(dateStr).getTime);
-      var row = Seq[Any]()
-      row = row :+ x
-      row = row :+ dt
-      row = row :+ "china"
-      row = row :+ "bbb" + x
-      row = row :+ "phone" + 100 * x
-      row = row :+ "ASD" + (1000 * x - x)
-      row = row :+ (25000 + x)
-      Row.fromSeq(row)
-    }
-    var df = cc.createDataFrame(data, schema)
-    df.write
-      .format("carbondata")
-      .option("tableName", "t5")
-      .option("tempCSV", "true")
-      .option("compress", "true")
-      .mode(SaveMode.Overwrite)
-      .save()
-    cc.sql("""
-           SELECT * FROM t5 ORDER BY t5_id
-           """).show()
-
-    // 1.Update data with simple SET
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    // Update data where salary < 15003
-    val dateStr = "2018-08-08"
-    cc.sql(s"""
-           UPDATE t3 SET (t3.date, t3.country) = ('$dateStr', 'india') WHERE t3.salary < 15003
-           """).show()
-    // Query data again after the above update
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    cc.sql("""
-           UPDATE t3 SET (t3.salary) = (t3.salary + 9) WHERE t3.name = 'aaa1'
-           """).show()
-    // Query data again after the above update
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    // 2.Update data with subquery result SET
-    cc.sql("""
-         UPDATE t3
-         SET (t3.country, t3.name) = (SELECT t5_country, t5_name FROM t5 WHERE t5_id = 5)
-         WHERE t3.id < 5""").show()
-    cc.sql("""
-         UPDATE t3
-         SET (t3.date, t3.serialname, t3.salary) =
-         (SELECT '2099-09-09', t5_serialname, '9999' FROM t5  WHERE t5_id = 5)
-         WHERE t3.id < 5""").show()
-
-    // Query data again after the above update
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    // 3.Update data with join query result SET
-    cc.sql("""
-         UPDATE t3
-         SET (t3.country, t3.salary) =
-         (SELECT t5_country, t5_salary FROM t5 FULL JOIN t3 u
-         WHERE u.id = t5_id and t5_id=6) WHERE t3.id >6""").show()
-
-    // Query data again after the above update
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    // 4.Delete data where salary > 15005
-    cc.sql("""
-           DELETE FROM t3 WHERE t3.salary > 15005
-           """).show()
-
-    // Query data again after delete data
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    // 5.Delete data WHERE id in (1, 2, $key)
-    var key = 3
-    cc.sql(s"""
-           DELETE FROM t3 WHERE t3.id in (1, 2, $key)
-           """).show()
-
-    // Query data again after delete data
-    cc.sql("""
-           SELECT * FROM t3 ORDER BY t3.id
-           """).show()
-
-    // Drop table
-    cc.sql("DROP TABLE IF EXISTS t3")
-    cc.sql("DROP TABLE IF EXISTS t5")
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala
deleted file mode 100644
index a3af2c3..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/DatasourceExample.scala
+++ /dev/null
@@ -1,44 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.spark.sql.{SaveMode, SQLContext}
-
-import org.apache.carbondata.examples.util.ExampleUtils
-
-object DatasourceExample {
-
-  def main(args: Array[String]) {
-    // use CarbonContext to write CarbonData files
-    val cc = ExampleUtils.createCarbonContext("DatasourceExample")
-    ExampleUtils.writeSampleCarbonFile(cc, "table1")
-
-    // Use SQLContext to read CarbonData files
-    val sqlContext = new SQLContext(cc.sparkContext)
-    sqlContext.sql(
-      s"""
-        | CREATE TEMPORARY TABLE source
-        | USING org.apache.spark.sql.CarbonSource
-        | OPTIONS (path '${cc.storePath}/default/table1')
-      """.stripMargin)
-    sqlContext.sql("SELECT c1, c2, count(*) FROM source WHERE c3 > 100 GROUP BY c1, c2").show
-
-    // delete carbondata file
-    ExampleUtils.cleanSampleCarbonFile(cc, "table1")
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
deleted file mode 100644
index 6a66b93..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.spark.sql.SQLContext
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.examples.util.ExampleUtils
-
-/**
- * This example needs Spark 1.6 or later version to run
- */
-object DirectSQLExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("DirectSQLExample")
-    ExampleUtils.writeSampleCarbonFile(cc, "table1")
-
-    // Use SQLContext to read CarbonData files without creating table
-    val sqlContext = new SQLContext(cc.sparkContext)
-    sqlContext.sql(
-      s"""
-        | SELECT c1, c2, count(*)
-        | FROM carbondata.`${cc.storePath}/${CarbonCommonConstants.DATABASE_DEFAULT_NAME}/table1`
-        | WHERE c3 > 100
-        | GROUP BY c1, c2
-      """.stripMargin).show
-
-    // delete carbondata file
-    ExampleUtils.cleanSampleCarbonFile(cc, "table1")
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala
deleted file mode 100644
index 94d35b0..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/GenerateDictionaryExample.scala
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.spark.sql.{CarbonContext, CarbonEnv, CarbonRelation}
-
-import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.metadata.CarbonTableIdentifier
-import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension
-import org.apache.carbondata.core.util.path.CarbonStorePath
-import org.apache.carbondata.examples.util.ExampleUtils
-import org.apache.carbondata.processing.util.CarbonLoaderUtil
-
-/**
- * example for global dictionary generation
- * pls check files under directory of target/store/default/dictSample/Metadata
- * and verify global dictionary values
- */
-object GenerateDictionaryExample {
-
-  def main(args: Array[String]) {
-    val cc = ExampleUtils.createCarbonContext("GenerateDictionaryExample")
-    val factFilePath = ExampleUtils.currentPath + "/src/main/resources/factSample.csv"
-    val carbonTablePath = CarbonStorePath.getCarbonTablePath(ExampleUtils.storeLocation,
-      new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "dictSample", "1"))
-    val dictFolderPath = carbonTablePath.getMetadataDirectoryPath
-
-    // execute sql statement
-    cc.sql("DROP TABLE IF EXISTS dictSample")
-
-    cc.sql("""
-           CREATE TABLE IF NOT EXISTS dictSample(id Int, name String, city String, salary Int)
-           STORED BY 'org.apache.carbondata.format'
-           """)
-
-    cc.sql(s"""
-           LOAD DATA LOCAL INPATH '$factFilePath' INTO TABLE dictSample
-           """)
-
-    // check generated dictionary
-    val tableIdentifier =
-      new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "dictSample", "1")
-    printDictionary(cc, tableIdentifier, dictFolderPath)
-  }
-
-  def printDictionary(cc: CarbonContext, carbonTableIdentifier: CarbonTableIdentifier,
-                      dictFolderPath: String) {
-    val dataBaseName = carbonTableIdentifier.getDatabaseName
-    val tableName = carbonTableIdentifier.getTableName
-    val carbonRelation = CarbonEnv.get.carbonMetastore.lookupRelation1(Option(dataBaseName),
-        tableName)(cc).asInstanceOf[CarbonRelation]
-    val carbonTable = carbonRelation.tableMeta.carbonTable
-    val dimensions = carbonTable.getDimensionByTableName(tableName.toLowerCase())
-      .toArray.map(_.asInstanceOf[CarbonDimension])
-    // scalastyle:off println
-    // print dictionary information
-    println("**********************************************************************************")
-    println(s"table:$tableName in " + s"database:$dataBaseName")
-    for (dimension <- dimensions) {
-      println("**********************************************************************************")
-      println(s"dictionary of dimension: ${dimension.getColName}")
-      println(s"Key\t\t\tValue")
-      val columnIdentifier = new DictionaryColumnUniqueIdentifier(carbonTableIdentifier,
-        dimension.getColumnIdentifier, dimension.getDataType,
-        CarbonStorePath
-          .getCarbonTablePath(carbonTable.getStorePath, carbonTable.getCarbonTableIdentifier))
-      val dict = CarbonLoaderUtil.getDictionary(columnIdentifier, cc.storePath)
-      var index: Int = 1
-      var distinctValue = dict.getDictionaryValueForKey(index)
-      while (distinctValue != null) {
-        println(index + s"\t\t\t" + distinctValue)
-        index += 1
-        distinctValue = dict.getDictionaryValueForKey(index)
-      }
-    }
-    println("**********************************************************************************")
-    // scalastyle:on println
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/0bf597d9/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala b/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
deleted file mode 100644
index d471ab6..0000000
--- a/examples/spark/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.examples
-
-import org.apache.hadoop.conf.Configuration
-
-import org.apache.carbondata.examples.util.ExampleUtils
-import org.apache.carbondata.hadoop.{CarbonInputFormat, CarbonProjection}
-
-// scalastyle:off println
-object HadoopFileExample {
-
-  def main(args: Array[String]): Unit = {
-    val cc = ExampleUtils.createCarbonContext("HadoopFileExample")
-    ExampleUtils.writeSampleCarbonFile(cc, "carbon1")
-
-    // read two columns
-    val projection = new CarbonProjection
-    projection.addColumn("c1")  // column c1
-    projection.addColumn("c3")  // column c3
-    val conf = new Configuration()
-    CarbonInputFormat.setColumnProjection(conf, projection)
-
-    val sc = cc.sparkContext
-    val input = sc.newAPIHadoopFile(s"${cc.storePath}/default/carbon1",
-      classOf[CarbonInputFormat[Array[Object]]],
-      classOf[Void],
-      classOf[Array[Object]],
-      conf)
-    val result = input.map(x => x._2.toList).collect
-    result.foreach(x => println(x.mkString(", ")))
-
-    // delete carbondata file
-    ExampleUtils.cleanSampleCarbonFile(cc, "carbon1")
-  }
-}
-// scalastyle:on println
-