You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@carbondata.apache.org by ch...@apache.org on 2017/07/23 01:46:51 UTC
carbondata git commit: [CARBONDATA-1310]Merge test code for
AddColumnTestCases, DropColumnTestCases and ChangeDataTypeTestCases
Repository: carbondata
Updated Branches:
refs/heads/master c8355b5de -> cf2a829db
[CARBONDATA-1310]Merge test code for AddColumnTestCases, DropColumnTestCases and ChangeDataTypeTestCases
This closes #1186
Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/cf2a829d
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/cf2a829d
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/cf2a829d
Branch: refs/heads/master
Commit: cf2a829db49be36a746a4460d03d5b1f0de909c4
Parents: c8355b5
Author: mayun <si...@163.com>
Authored: Wed Jul 19 17:42:19 2017 +0800
Committer: chenliang613 <ch...@apache.org>
Committed: Sun Jul 23 09:46:38 2017 +0800
----------------------------------------------------------------------
.../rowreader/AddColumnTestCases.scala | 235 ---------
.../rowreader/ChangeDataTypeTestCases.scala | 125 -----
.../rowreader/DropColumnTestCases.scala | 87 ----
.../vectorreader/AddColumnTestCases.scala | 495 +++++++++++++------
.../vectorreader/ChangeDataTypeTestCases.scala | 170 ++++---
.../vectorreader/DropColumnTestCases.scala | 100 ++--
6 files changed, 510 insertions(+), 702 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf2a829d/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
deleted file mode 100644
index 2a199ef..0000000
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/AddColumnTestCases.scala
+++ /dev/null
@@ -1,235 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.carbondata.restructure.rowreader
-
-import java.io.{File, FileOutputStream, FileWriter}
-import java.math.{BigDecimal, RoundingMode}
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.Spark2QueryTest
-import org.apache.spark.sql.test.TestQueryExecutor
-import org.scalatest.BeforeAndAfterAll
-
-class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
-
- override def beforeAll {
- sqlContext.setConf("carbon.enable.vector.reader", "false")
- sql("DROP TABLE IF EXISTS addcolumntest")
- sql("drop table if exists hivetable")
- sql(
- "CREATE TABLE addcolumntest(intField int,stringField string,timestampField timestamp," +
- "decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE addcolumntest " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
- sql(
- "Alter table addcolumntest add columns(charField string) TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField', 'DEFAULT.VALUE.charfield'='def')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE addcolumntest " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("CREATE TABLE hivetable stored as parquet select * from addcolumntest")
- }
-
- test("test like query on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField like 'd%'"), Row("def"))
- }
-
- test("test is not null filter on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField is not null"),
- Seq(Row("abc"), Row("def")))
- }
-
- test("test is null filter on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField is null"), Seq())
- }
-
- test("test equals filter on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField = 'abc'"), Row("abc"))
- }
-
- test("test add dictionary column and test greaterthan/lessthan filter on new column") {
- sql(
- "Alter table addcolumntest add columns(intnewField int) TBLPROPERTIES" +
- "('DICTIONARY_INCLUDE'='intnewField', 'DEFAULT.VALUE.intNewField'='5')")
- checkAnswer(sql("select charField from addcolumntest where intnewField > 2"),
- Seq(Row("abc"), Row("def")))
- checkAnswer(sql("select charField from addcolumntest where intnewField < 2"), Seq())
- }
-
- test("test compaction after adding new column") {
- sql("Alter table addcolumntest compact 'major'")
- checkExistence(sql("show segments for table addcolumntest"), true, "0Compacted")
- checkExistence(sql("show segments for table addcolumntest"), true, "1Compacted")
- checkExistence(sql("show segments for table addcolumntest"), true, "0.1Success")
- checkAnswer(sql("select charField from addcolumntest"), Seq(Row("abc"), Row("def")))
- }
-
- test("test add msr column and check aggregate") {
- sql(
- "alter table addcolumntest add columns(msrField decimal(5,2))TBLPROPERTIES ('DEFAULT.VALUE" +
- ".msrfield'= '123.45')")
- checkAnswer(sql("select sum(msrField) from addcolumntest"),
- Row(new BigDecimal("246.90").setScale(2, RoundingMode.HALF_UP)))
- }
-
- test("test join on new column") {
- checkAnswer(sql(
- "select t1.charField, t2.charField from addcolumntest t1, hivetable t2 where t1.charField =" +
- " t2.charField"),
- Seq(Row("abc", "abc"), Row("def", "def")))
- }
-
- test("test add and drop column with data loading") {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table drop columns(timestampField)")
- sql("select * from carbon_table").collect
- sql("Alter table carbon_table add columns(timestampField timestamp)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data5.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,decimalField,timestampField')")
- sql("DROP TABLE IF EXISTS carbon_table")
- }
-
- test("test add/drop and change datatype") {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table drop columns(charField)")
- sql("select * from carbon_table").collect
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
- sql(
- "Alter table carbon_table add columns(charField string) TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data2.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField,charField')")
- sql("select * from carbon_table").collect
- sql("ALTER TABLE carbon_table CHANGE decimalField decimalField decimal(22,6)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data3.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField,charField')")
- sql("DROP TABLE IF EXISTS carbon_table")
- }
-
-
- test("test add column compaction") {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table add columns(decimalField decimal(6,2))")
-
- sql("Alter table carbon_table compact 'minor'")
-
- sql("DROP TABLE IF EXISTS carbon_table")
- }
-
- test("test to add column with char datatype") {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table add columns(newfield char(10)) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='char')")
- checkAnswer(sql("select distinct(newfield) from carbon_table"),Row("char"))
- sql("DROP TABLE IF EXISTS carbon_table")
- }
-
- test("test to check if exception is thrown with wrong char syntax") {
- intercept[Exception] {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
-
- "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(
- "Alter table carbon_table add columns(newfield char) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='c')")
- sql("DROP TABLE IF EXISTS carbon_table")
- }
- }
-
- test("test to add column with varchar datatype") {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table add columns(newfield varchar(10)) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='char')")
- checkAnswer(sql("select distinct(newfield) from carbon_table"),Row("char"))
- sql("DROP TABLE IF EXISTS carbon_table")
- }
-
- test("test to check if exception is thrown with wrong varchar syntax") {
- intercept[Exception] {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
-
- "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- sql(
- "Alter table carbon_table add columns(newfield varchar) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='c')")
- sql("DROP TABLE IF EXISTS carbon_table")
- }
- }
-
- test ("test to check if exception is thrown if table is locked for updation") {
- intercept[Exception] {
- sql("DROP TABLE IF EXISTS carbon_table")
- sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp)STORED BY 'carbondata' TBLPROPERTIES" +
- "('DICTIONARY_EXCLUDE'='charField')")
- val lockFilePath = s"${ TestQueryExecutor.storeLocation }/default/carbon_table/meta.lock"
- new File(lockFilePath).createNewFile()
- sql(
- "Alter table carbon_table add columns(newfield string) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='c')")
- new FileOutputStream(lockFilePath).getChannel.lock()
- sql(
- "Alter table carbon_table drop columns(newfield)")
- new File(lockFilePath).delete()
- sql("DROP TABLE IF EXISTS carbon_table")
- }
- }
-
- override def afterAll {
- sql("DROP TABLE IF EXISTS addcolumntest")
- sql("drop table if exists hivetable")
- }
-}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf2a829d/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
deleted file mode 100644
index fc2da21..0000000
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/ChangeDataTypeTestCases.scala
+++ /dev/null
@@ -1,125 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.carbondata.restructure.rowreader
-
-import java.math.BigDecimal
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.Spark2QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-class ChangeDataTypeTestCases extends Spark2QueryTest with BeforeAndAfterAll {
-
- override def beforeAll {
- sqlContext.setConf("carbon.enable.vector.reader", "false")
- sql("DROP TABLE IF EXISTS changedatatypetest")
- sql("drop table if exists hivetable")
- }
-
- test("test change datatype on existing column and load data, insert into hive table") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intfield bigint")
- sql(
- "CREATE TABLE hivetable(intField bigint,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2)) stored as parquet")
- sql("insert into table hivetable select * from changedatatypetest")
- afterAll
- }
-
- test("test datatype change and filter") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intfield bigint")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- checkAnswer(sql("select charField from changedatatypetest where intField > 99"),
- Seq(Row("abc"), Row("abc")))
- checkAnswer(sql("select charField from changedatatypetest where intField < 99"), Seq())
- checkAnswer(sql("select charField from changedatatypetest where intField = 100"),
- Seq(Row("abc"), Row("abc")))
- afterAll
- }
-
-
- test("test change int datatype and load data") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intfield bigint")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- checkAnswer(sql("select sum(intField) from changedatatypetest"), Row(200))
- afterAll
- }
-
- test("test change decimal datatype and compaction") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change decimalField decimalField decimal(9,5)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- checkAnswer(sql("select decimalField from changedatatypetest"),
- Seq(Row(new BigDecimal("21.23").setScale(5)), Row(new BigDecimal("21.23").setScale(5))))
- sql("alter table changedatatypetest compact 'major'")
- checkExistence(sql("show segments for table changedatatypetest"), true, "0Compacted")
- checkExistence(sql("show segments for table changedatatypetest"), true, "1Compacted")
- checkExistence(sql("show segments for table changedatatypetest"), true, "0.1Success")
- afterAll
- }
-
- test("test to change int datatype to long") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intField long")
- checkAnswer(sql("select intField from changedatatypetest limit 1"), Row(100))
- afterAll
- }
-
- override def afterAll {
- sql("DROP TABLE IF EXISTS changedatatypetest")
- sql("drop table if exists hivetable")
- }
-}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf2a829d/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/DropColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/DropColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/DropColumnTestCases.scala
deleted file mode 100644
index a812548..0000000
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/rowreader/DropColumnTestCases.scala
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.spark.carbondata.restructure.rowreader
-
-import java.math.{BigDecimal, RoundingMode}
-
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.common.util.Spark2QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.util.CarbonProperties
-
-class DropColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
-
- override def beforeAll {
- sqlContext.setConf("carbon.enable.vector.reader", "false")
- sql("DROP TABLE IF EXISTS dropcolumntest")
- sql("drop table if exists hivetable")
- }
-
- test("test drop column and insert into hive table") {
- beforeAll
- sql(
- "CREATE TABLE dropcolumntest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table dropcolumntest drop columns(charField)")
- sql(
- "CREATE TABLE hivetable(intField int,stringField string,timestampField timestamp," +
- "decimalField decimal(6,2)) stored as parquet")
- sql("insert into table hivetable select * from dropcolumntest")
- checkAnswer(sql("select * from hivetable"), sql("select * from dropcolumntest"))
- afterAll
- }
-
- test("test drop column and load data") {
- beforeAll
- sql(
- "CREATE TABLE dropcolumntest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table dropcolumntest drop columns(charField)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
- checkAnswer(sql("select count(*) from dropcolumntest"), Row(2))
- afterAll
- }
-
- test("test drop column and compaction") {
- beforeAll
- sql(
- "CREATE TABLE dropcolumntest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table dropcolumntest drop columns(charField)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
- sql("alter table dropcolumntest compact 'major'")
- checkExistence(sql("show segments for table dropcolumntest"), true, "0Compacted")
- checkExistence(sql("show segments for table dropcolumntest"), true, "1Compacted")
- checkExistence(sql("show segments for table dropcolumntest"), true, "0.1Success")
- afterAll
- }
-
- override def afterAll {
- sql("DROP TABLE IF EXISTS dropcolumntest")
- sql("drop table if exists hivetable")
- }
-}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf2a829d/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
index 5e2f729..7f836f9 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
@@ -17,11 +17,13 @@
package org.apache.spark.carbondata.restructure.vectorreader
+import java.io.{File, FileOutputStream, FileWriter}
import java.math.{BigDecimal, RoundingMode}
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.Row
import org.apache.spark.sql.common.util.Spark2QueryTest
+import org.apache.spark.sql.test.TestQueryExecutor
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
@@ -29,357 +31,542 @@ import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
override def beforeAll {
- sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS addcolumntest")
- sql("drop table if exists hivetable")
+ sql("DROP TABLE IF EXISTS hivetable")
sql(
- "CREATE TABLE addcolumntest(intField int,stringField string,timestampField timestamp," +
- "decimalField decimal(6,2)) STORED BY 'carbondata'")
+ "CREATE TABLE addcolumntest(intField INT,stringField STRING,timestampField TIMESTAMP," +
+ "decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE addcolumntest " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,timestampField,decimalField')")
sql(
- "Alter table addcolumntest add columns(charField string) TBLPROPERTIES" +
+ "ALTER TABLE addcolumntest ADD COLUMNS(charField STRING) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField', 'DEFAULT.VALUE.charfield'='def')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE addcolumntest " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE hivetable STORED AS PARQUET SELECT * FROM addcolumntest")
}
test("test like query on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField like 'd%'"), Row("def"))
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField LIKE 'd%'"), Row("def"))
+
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField LIKE 'd%'"), Row("def"))
}
test("test is not null filter on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField is not null"),
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField IS NOT NULL"),
+ Seq(Row("abc"), Row("def")))
+
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField IS NOT NULL"),
Seq(Row("abc"), Row("def")))
}
test("test is null filter on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField is null"), Seq())
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField IS NULL"), Seq())
+
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField IS NULL"), Seq())
}
test("test equals filter on new column") {
- checkAnswer(sql("select charField from addcolumntest where charField = 'abc'"), Row("abc"))
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField = 'abc'"), Row("abc"))
+
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE charField = 'abc'"), Row("abc"))
}
test("test add dictionary column and test greaterthan/lessthan filter on new column") {
- sql(
- "Alter table addcolumntest add columns(intnewField int) TBLPROPERTIES" +
- "('DICTIONARY_INCLUDE'='intnewField', 'DEFAULT.VALUE.intNewField'='5')")
- checkAnswer(sql("select charField from addcolumntest where intnewField > 2"),
- Seq(Row("abc"), Row("def")))
- checkAnswer(sql("select charField from addcolumntest where intnewField < 2"), Seq())
+ def test_add_and_filter() = {
+ sql(
+ "ALTER TABLE addcolumntest ADD COLUMNS(intnewField INT) TBLPROPERTIES" +
+ "('DICTIONARY_INCLUDE'='intnewField', 'DEFAULT.VALUE.intNewField'='5')")
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE intnewField > 2"),
+ Seq(Row("abc"), Row("def")))
+ checkAnswer(sql("SELECT charField FROM addcolumntest WHERE intnewField < 2"), Seq())
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_add_and_filter()
+ afterAll
+ beforeAll
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_add_and_filter
}
test("test add msr column and check aggregate") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql(
- "alter table addcolumntest add columns(msrField DECIMAL(5,2))TBLPROPERTIES ('DEFAULT.VALUE" +
+ "ALTER TABLE addcolumntest ADD COLUMNS(msrField DECIMAL(5,2))TBLPROPERTIES ('DEFAULT.VALUE" +
".msrfield'= '123.45')")
- checkAnswer(sql("select sum(msrField) from addcolumntest"),
+ checkAnswer(sql("SELECT SUM(msrField) FROM addcolumntest"),
Row(new BigDecimal("246.90").setScale(2, RoundingMode.HALF_UP)))
+
+ afterAll
+ beforeAll
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ sql(
+ "ALTER TABLE addcolumntest ADD COLUMNS(msrField DECIMAL(5,2))TBLPROPERTIES ('DEFAULT.VALUE" +
+ ".msrfield'= '123.45')")
+ checkAnswer(sql("SELECT SUM(msrField) FROM addcolumntest"),
+ Row(new BigDecimal("246.90").setScale(2, RoundingMode.HALF_UP)))
+ }
+
+ test("test join on new column") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ checkAnswer(sql(
+ "SELECT t1.charField, t2.charField FROM addcolumntest t1, hivetable t2 WHERE t1.charField =" +
+ " t2.charField"),
+ Seq(Row("abc", "abc"), Row("def", "def")))
}
test("test compaction after adding new column") {
- sql("Alter table addcolumntest compact 'major'")
- checkExistence(sql("show segments for table addcolumntest"), true, "0Compacted")
- checkExistence(sql("show segments for table addcolumntest"), true, "1Compacted")
- checkExistence(sql("show segments for table addcolumntest"), true, "0.1Success")
- checkAnswer(sql("select charField from addcolumntest"), Seq(Row("abc"), Row("def")))
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ sql("ALTER TABLE addcolumntest COMPACT 'major'")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE addcolumntest"), true, "0Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE addcolumntest"), true, "1Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE addcolumntest"), true, "0.1Success")
+ checkAnswer(sql("SELECT charField FROM addcolumntest"), Seq(Row("abc"), Row("def")))
+
+ afterAll
+ beforeAll
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ sql("ALTER TABLE addcolumntest COMPACT 'major'")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE addcolumntest"), true, "0Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE addcolumntest"), true, "1Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE addcolumntest"), true, "0.1Success")
+ checkAnswer(sql("SELECT charField FROM addcolumntest"), Seq(Row("abc"), Row("def")))
}
test("test add and drop column with data loading") {
+ def test_add_drop_load() = {
+ sql("DROP TABLE IF EXISTS carbon_table")
+ sql(
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP,decimalField DECIMAL(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "('DICTIONARY_EXCLUDE'='charField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE carbon_table DROP COLUMNS(timestampField)")
+ sql("SELECT * FROM carbon_table").collect
+ sql("ALTER TABLE carbon_table ADD COLUMNS(timestampField TIMESTAMP)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data5.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,decimalField,timestampField')")
+ sql("DROP TABLE IF EXISTS carbon_table")
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_add_drop_load()
+ afterAll
+ beforeAll
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_add_drop_load()
+ }
+
+ test("test add/drop and change datatype") {
+ def test_add_drop_change() = {
+ sql("DROP TABLE IF EXISTS carbon_table")
+ sql(
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP,decimalField DECIMAL(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "('DICTIONARY_EXCLUDE'='charField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE carbon_table DROP COLUMNS(charField)")
+ sql("SELECT * FROM carbon_table").collect
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,timestampField,decimalField')")
+ sql(
+ "ALTER TABLE carbon_table ADD COLUMNS(charField STRING) TBLPROPERTIES" +
+ "('DICTIONARY_EXCLUDE'='charField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data2.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,timestampField,decimalField,charField')")
+ sql("SELECT * FROM carbon_table").collect
+ sql("ALTER TABLE carbon_table CHANGE decimalField decimalField DECIMAL(22,6)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data3.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,timestampField,decimalField,charField')")
+ sql("DROP TABLE IF EXISTS carbon_table")
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_add_drop_change
+ afterAll
+ beforeAll
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_add_drop_change
+ }
+
+
+ test("test add column compaction") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
sql("DROP TABLE IF EXISTS carbon_table")
sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP)STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table drop columns(timestampField)")
- sql("select * from carbon_table").collect
- sql("Alter table carbon_table add columns(timestampField timestamp)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data5.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,decimalField,timestampField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE carbon_table ADD COLUMNS(decimalField DECIMAL(6,2))")
+
+ sql("ALTER TABLE carbon_table COMPACT 'minor'")
+
sql("DROP TABLE IF EXISTS carbon_table")
}
- test("test add/drop and change datatype") {
+ test("test to add column with char datatype") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
sql("DROP TABLE IF EXISTS carbon_table")
sql(
- "CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP)STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table carbon_table drop columns(charField)")
- sql("select * from carbon_table").collect
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE carbon_table ADD COLUMNS(newfield char(10)) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='char')")
+ checkAnswer(sql("SELECT DISTINCT(newfield) FROM carbon_table"),Row("char"))
+ sql("DROP TABLE IF EXISTS carbon_table")
+ }
+
+ test("test to check if exception is thrown with wrong char syntax") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ intercept[Exception] {
+ sql("DROP TABLE IF EXISTS carbon_table")
+ sql(
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+
+ "TIMESTAMP)STORED BY 'carbondata' TBLPROPERTIES" +
+ "('DICTIONARY_EXCLUDE'='charField')")
+ sql(
+ "ALTER TABLE carbon_table ADD COLUMNS(newfield char) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='c')")
+ sql("DROP TABLE IF EXISTS carbon_table")
+ }
+ }
+
+ test("test to add column with varchar datatype") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ sql("DROP TABLE IF EXISTS carbon_table")
sql(
- "Alter table carbon_table add columns(charField string) TBLPROPERTIES" +
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP)STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data2.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField,charField')")
- sql("select * from carbon_table").collect
- sql("ALTER TABLE carbon_table CHANGE decimalField decimalField decimal(22,6)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data3.csv' INTO TABLE carbon_table " +
- s"options('FILEHEADER'='intField,stringField,timestampField,decimalField,charField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table " +
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE carbon_table ADD COLUMNS(newfield varchar(10)) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='char')")
+ checkAnswer(sql("SELECT DISTINCT(newfield) FROM carbon_table"),Row("char"))
sql("DROP TABLE IF EXISTS carbon_table")
}
+ test("test to check if exception is thrown with wrong varchar syntax") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ intercept[Exception] {
+ sql("DROP TABLE IF EXISTS carbon_table")
+ sql(
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+
+ "TIMESTAMP)STORED BY 'carbondata' TBLPROPERTIES" +
+ "('DICTIONARY_EXCLUDE'='charField')")
+ sql(
+ "ALTER TABLE carbon_table ADD COLUMNS(newfield varchar) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='c')")
+ sql("DROP TABLE IF EXISTS carbon_table")
+ }
+ }
+
+ test("test to check if exception is thrown if TABLE is locked for updation") {
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ intercept[Exception] {
+ sql("DROP TABLE IF EXISTS carbon_table")
+ sql(
+ "CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP)STORED BY 'carbondata' TBLPROPERTIES" +
+ "('DICTIONARY_EXCLUDE'='charField')")
+ val lockFilePath = s"${ TestQueryExecutor.storeLocation }/default/carbon_table/meta.lock"
+ new File(lockFilePath).createNewFile()
+ sql(
+ "ALTER TABLE carbon_table ADD COLUMNS(newfield STRING) TBLPROPERTIES ('DEFAULT.VALUE.newfield'='c')")
+ new FileOutputStream(lockFilePath).getChannel.lock()
+ sql(
+ "ALTER TABLE carbon_table DROP COLUMNS(newfield)")
+ new File(lockFilePath).delete()
+ sql("DROP TABLE IF EXISTS carbon_table")
+ }
+ }
+
test("test to check if select * works for new added column") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_new")
sql(
- "CREATE TABLE carbon_new(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "CREATE TABLE carbon_new(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP,decimalField DECIMAL(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_new " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_new add columns(newField string) TBLPROPERTIES" +
+ "ALTER TABLE carbon_new ADD COLUMNS(newField STRING) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='newField','DEFAULT.VALUE.newField'='def')")
- checkAnswer(sql("select * from carbon_new limit 1"),
+ checkAnswer(sql("SELECT * FROM carbon_new LIMIT 1"),
Row(new Integer(100),
"spark",
"abc",
Timestamp.valueOf("2015-04-23 00:00:00.0"),
new BigDecimal(21.23).setScale(2, RoundingMode.HALF_UP),
"def"))
- sql("drop table carbon_new")
+ sql("DROP TABLE carbon_new")
}
test("test to check data if all columns are provided in select") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_new")
sql(
- "CREATE TABLE carbon_new(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "CREATE TABLE carbon_new(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP,decimalField DECIMAL(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_new " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_new add columns(newField string) TBLPROPERTIES" +
+ "ALTER TABLE carbon_new ADD COLUMNS(newField STRING) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='newField')")
assert(sql(
- "select intField,stringField,charField,timestampField,decimalField, newField from " +
- "carbon_new limit 1").count().equals(1L))
- sql("drop table carbon_new")
+ "SELECT intField,stringField,charField,timestampField,decimalField, newField FROM " +
+ "carbon_new LIMIT 1").count().equals(1L))
+ sql("DROP TABLE carbon_new")
}
test("test to check data if new column query order is different from schema order") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_new")
sql(
- "CREATE TABLE carbon_new(intField int,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
+ "CREATE TABLE carbon_new(intField INT,stringField STRING,charField STRING,timestampField " +
+ "TIMESTAMP,decimalField DECIMAL(6,2))STORED BY 'carbondata' TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='charField')")
sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_new " +
- s"options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_new add columns(newField string) TBLPROPERTIES" +
+ "ALTER TABLE carbon_new ADD COLUMNS(newField STRING) TBLPROPERTIES" +
"('DICTIONARY_EXCLUDE'='newField','DEFAULT.VALUE.newField'='def')")
checkAnswer(sql(
- "select intField,stringField,charField,newField,timestampField,decimalField from " +
- "carbon_new limit 1"), Row(new Integer(100),
+ "SELECT intField,stringField,charField,newField,timestampField,decimalField FROM " +
+ "carbon_new LIMIT 1"), Row(new Integer(100),
"spark",
"abc",
"def",
Timestamp.valueOf("2015-04-23 00:00:00.0"),
new BigDecimal(21.23).setScale(2, RoundingMode.HALF_UP)))
- sql("drop table carbon_new")
+ sql("DROP TABLE carbon_new")
}
test("test to check if vector result collector is able to fetch large amount of data") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_new")
sql(
- """CREATE TABLE carbon_new (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB
- |timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1
- |decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2
- |double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES
+ """CREATE TABLE carbon_new (CUST_ID INT,CUST_NAME STRING,ACTIVE_EMUI_VERSION STRING, DOB
+ |TIMESTAMP, DOJ TIMESTAMP, BIGINT_COLUMN1 BIGINT,BIGINT_COLUMN2 BIGINT,DECIMAL_COLUMN1
+ |decimal(30,10), DECIMAL_COLUMN2 DECIMAL(36,10),Double_COLUMN1 double, Double_COLUMN2
+ |double,INTEGER_COLUMN1 INT) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES
|("TABLE_BLOCKSIZE"= "256 MB")""".stripMargin)
- sql("alter table carbon_new drop columns(CUST_NAME)")
- sql(s"LOAD DATA INPATH '$resourcesPath/restructure/data_2000.csv' into table " +
+ sql("ALTER TABLE carbon_new DROP COLUMNS(CUST_NAME)")
+ sql(s"LOAD DATA INPATH '$resourcesPath/restructure/data_2000.csv' INTO TABLE " +
"carbon_new OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\"','BAD_RECORDS_ACTION'='FORCE'," +
"'FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1," +
"BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2," +
"INTEGER_COLUMN1')")
sql(
- """alter table carbon_new add columns(CUST_NAME string) TBLPROPERTIES
+ """ALTER TABLE carbon_new ADD COLUMNS(CUST_NAME STRING) TBLPROPERTIES
('DICTIONARY_EXCLUDE'='CUST_NAME', 'DEFAULT.VALUE.CUST_NAME'='testuser')""")
- checkAnswer(sql("select distinct(CUST_NAME) from carbon_new"),Row("testuser"))
+ checkAnswer(sql("SELECT DISTINCT(CUST_NAME) FROM carbon_new"),Row("testuser"))
}
test("test for checking newly added measure column for is null condition") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_measure_is_null")
- sql("CREATE TABLE carbon_measure_is_null (CUST_ID int,CUST_NAME String) STORED BY 'carbondata'")
+ sql("CREATE TABLE carbon_measure_is_null (CUST_ID INT,CUST_NAME STRING) STORED BY 'carbondata'")
sql(
- s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' into table carbon_measure_is_null" +
+ s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' INTO TABLE carbon_measure_is_null" +
s" OPTIONS" +
s"('BAD_RECORDS_LOGGER_ENABLE'='TRUE', " +
s"'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME')")
- sql("ALTER TABLE carbon_measure_is_null ADD COLUMNS (a6 int)")
+ sql("ALTER TABLE carbon_measure_is_null ADD COLUMNS (a6 INT)")
sql(
- s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' into table carbon_measure_is_null" +
+ s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' INTO TABLE carbon_measure_is_null" +
s" OPTIONS" +
s"('BAD_RECORDS_LOGGER_ENABLE'='TRUE', " +
s"'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,a6')")
- sql("select a6 from carbon_measure_is_null where a6 is null").show
- checkAnswer(sql("select * from carbon_measure_is_null"),
- sql("select * from carbon_measure_is_null where a6 is null"))
- checkAnswer(sql("select count(*) from carbon_measure_is_null where a6 is not null"), Row(0))
+ sql("SELECT a6 FROM carbon_measure_is_null WHERE a6 IS NULL").show
+ checkAnswer(sql("SELECT * FROM carbon_measure_is_null"),
+ sql("SELECT * FROM carbon_measure_is_null WHERE a6 IS NULL"))
+ checkAnswer(sql("SELECT count(*) FROM carbon_measure_is_null WHERE a6 IS NOT NULL"), Row(0))
sql("DROP TABLE IF EXISTS carbon_measure_is_null")
}
test("test to check if intField returns correct result") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_table")
- sql("CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField timestamp, decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField TIMESTAMP, decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_table add columns(newField int) TBLPROPERTIES" +
+ "ALTER TABLE carbon_table ADD COLUMNS(newField INT) TBLPROPERTIES" +
"('DEFAULT.VALUE.newField'='67890')")
- checkAnswer(sql("select distinct(newField) from carbon_table"), Row(67890))
+ checkAnswer(sql("SELECT DISTINCT(newField) FROM carbon_table"), Row(67890))
sql("DROP TABLE IF EXISTS carbon_table")
}
test("test to check if shortField returns correct result") {
sql("DROP TABLE IF EXISTS carbon_table")
- sql("CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField timestamp, decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField TIMESTAMP, decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_table add columns(newField short) TBLPROPERTIES" +
+ "ALTER TABLE carbon_table ADD COLUMNS(newField short) TBLPROPERTIES" +
"('DEFAULT.VALUE.newField'='1')")
- checkAnswer(sql("select distinct(newField) from carbon_table"), Row(1))
+ checkAnswer(sql("SELECT DISTINCT(newField) FROM carbon_table"), Row(1))
sql("DROP TABLE IF EXISTS carbon_table")
}
test("test to check if doubleField returns correct result") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_table")
- sql("CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField timestamp, decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField TIMESTAMP, decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_table add columns(newField double) TBLPROPERTIES" +
+ "ALTER TABLE carbon_table ADD COLUMNS(newField double) TBLPROPERTIES" +
"('DEFAULT.VALUE.newField'='1457567.87')")
- checkAnswer(sql("select distinct(newField) from carbon_table"), Row(1457567.87))
+ checkAnswer(sql("SELECT DISTINCT(newField) FROM carbon_table"), Row(1457567.87))
sql("DROP TABLE IF EXISTS carbon_table")
}
test("test to check if decimalField returns correct result") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_table")
- sql("CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField timestamp, decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField TIMESTAMP, decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_table add columns(newField decimal(5,2)) TBLPROPERTIES" +
+ "ALTER TABLE carbon_table ADD COLUMNS(newField DECIMAL(5,2)) TBLPROPERTIES" +
"('DEFAULT.VALUE.newField'='21.87')")
- checkAnswer(sql("select distinct(newField) from carbon_table"), Row(21.87))
+ checkAnswer(sql("SELECT DISTINCT(newField) FROM carbon_table"), Row(21.87))
sql("DROP TABLE IF EXISTS carbon_table")
}
test("test for checking newly added dictionary column for is null condition") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_dictionary_is_null")
sql(
- "CREATE TABLE carbon_dictionary_is_null (CUST_ID int,CUST_NAME String) STORED BY " +
+ "CREATE TABLE carbon_dictionary_is_null (CUST_ID INT,CUST_NAME STRING) STORED BY " +
"'carbondata'")
sql(
- s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' into table " +
+ s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' INTO TABLE " +
s"carbon_dictionary_is_null" +
s" OPTIONS" +
s"('BAD_RECORDS_LOGGER_ENABLE'='TRUE', " +
s"'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME')")
sql(
- "ALTER TABLE carbon_dictionary_is_null ADD COLUMNS (a6 int) tblproperties" +
+ "ALTER TABLE carbon_dictionary_is_null ADD COLUMNS (a6 INT) TBLPROPERTIES" +
"('dictionary_include'='a6')")
sql(
- s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' into table " +
+ s"LOAD DATA INPATH '$resourcesPath/restructure/data6.csv' INTO TABLE " +
s"carbon_dictionary_is_null" +
s" OPTIONS" +
s"('BAD_RECORDS_LOGGER_ENABLE'='TRUE', " +
s"'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,a6')")
- checkAnswer(sql("select * from carbon_dictionary_is_null"),
- sql("select * from carbon_dictionary_is_null where a6 is null"))
- checkAnswer(sql("select count(*) from carbon_dictionary_is_null where a6 is not null"), Row(0))
+ checkAnswer(sql("SELECT * FROM carbon_dictionary_is_null"),
+ sql("SELECT * FROM carbon_dictionary_is_null WHERE a6 IS NULL"))
+ checkAnswer(sql("SELECT count(*) FROM carbon_dictionary_is_null WHERE a6 IS NOT NULL"), Row(0))
sql("DROP TABLE IF EXISTS carbon_dictionary_is_null")
}
test("test add column for new decimal column filter query") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS alter_decimal_filter")
sql(
- "create table alter_decimal_filter (n1 string, n2 int, n3 decimal(3,2)) stored by " +
+ "CREATE TABLE alter_decimal_filter (n1 STRING, n2 INT, n3 DECIMAL(3,2)) STORED BY " +
"'carbondata'")
- sql("insert into alter_decimal_filter select 'xx',1,1.22")
- sql("insert into alter_decimal_filter select 'xx',1,1.23")
- sql("alter table alter_decimal_filter change n3 n3 decimal(8,4)")
- sql("insert into alter_decimal_filter select 'dd',2,111.111")
- sql("select * from alter_decimal_filter where n3 = 1.22").show()
- checkAnswer(sql("select * from alter_decimal_filter where n3 = 1.22"),
+ sql("INSERT INTO alter_decimal_filter SELECT 'xx',1,1.22")
+ sql("INSERT INTO alter_decimal_filter SELECT 'xx',1,1.23")
+ sql("ALTER TABLE alter_decimal_filter CHANGE n3 n3 DECIMAL(8,4)")
+ sql("INSERT INTO alter_decimal_filter SELECT 'dd',2,111.111")
+ sql("SELECT * FROM alter_decimal_filter WHERE n3 = 1.22").show()
+ checkAnswer(sql("SELECT * FROM alter_decimal_filter WHERE n3 = 1.22"),
Row("xx", 1, new BigDecimal(1.2200).setScale(4, RoundingMode.HALF_UP)))
sql("DROP TABLE IF EXISTS alter_decimal_filter")
}
test("test add column with date") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_table")
- sql("CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField timestamp, decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField TIMESTAMP, decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_table add columns(newField date) TBLPROPERTIES" +
+ "ALTER TABLE carbon_table ADD COLUMNS(newField DATE) TBLPROPERTIES" +
"('DEFAULT.VALUE.newField'='2017-01-01')")
- checkAnswer(sql("select distinct(newField) from carbon_table"), Row(Date.valueOf("2017-01-01")))
+ checkAnswer(sql("SELECT DISTINCT(newField) FROM carbon_table"), Row(Date.valueOf("2017-01-01")))
sql("DROP TABLE IF EXISTS carbon_table")
}
test("test add column with timestamp") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS carbon_table")
- sql("CREATE TABLE carbon_table(intField int,stringField string,charField string,timestampField timestamp, decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("CREATE TABLE carbon_table(intField INT,stringField STRING,charField STRING,timestampField TIMESTAMP, decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE carbon_table OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
sql(
- "Alter table carbon_table add columns(newField timestamp) TBLPROPERTIES" +
+ "ALTER TABLE carbon_table ADD COLUMNS(newField TIMESTAMP) TBLPROPERTIES" +
"('DEFAULT.VALUE.newField'='01-01-2017 00:00:00.0')")
- checkAnswer(sql("select distinct(newField) from carbon_table"), Row(Timestamp.valueOf("2017-01-01 00:00:00.0")))
+ checkAnswer(sql("SELECT DISTINCT(newField) FROM carbon_table"), Row(Timestamp.valueOf("2017-01-01 00:00:00.0")))
sql("DROP TABLE IF EXISTS carbon_table")
}
test("test compaction with all dictionary columns") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS alter_dict")
- sql("CREATE TABLE alter_dict(stringField string,charField string) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_dict options('FILEHEADER'='stringField,charField')")
- sql("Alter table alter_dict drop columns(charField)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_dict options('FILEHEADER'='stringField')")
- sql("Alter table alter_dict compact 'major'")
- checkExistence(sql("show segments for table alter_dict"), true, "0Compacted")
- checkExistence(sql("show segments for table alter_dict"), true, "1Compacted")
- checkExistence(sql("show segments for table alter_dict"), true, "0.1Success")
+ sql("CREATE TABLE alter_dict(stringField STRING,charField STRING) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_dict OPTIONS('FILEHEADER'='stringField,charField')")
+ sql("ALTER TABLE alter_dict DROP COLUMNS(charField)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_dict OPTIONS('FILEHEADER'='stringField')")
+ sql("ALTER TABLE alter_dict COMPACT 'major'")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE alter_dict"), true, "0Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE alter_dict"), true, "1Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE alter_dict"), true, "0.1Success")
sql("DROP TABLE IF EXISTS alter_dict")
}
test("test sort_columns for add columns") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS alter_sort_columns")
sql(
- "CREATE TABLE alter_sort_columns(stringField string,charField string) STORED BY 'carbondata'")
+ "CREATE TABLE alter_sort_columns(stringField STRING,charField STRING) STORED BY 'carbondata'")
val caught = intercept[MalformedCarbonCommandException] {
sql(
- "Alter table alter_sort_columns add columns(newField Int) tblproperties" +
+ "ALTER TABLE alter_sort_columns ADD COLUMNS(newField INT) TBLPROPERTIES" +
"('sort_columns'='newField')")
}
assert(caught.getMessage.equals("Unsupported Table property in add column: sort_columns"))
}
test("test compaction with all no dictionary columns") {
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS alter_no_dict")
- sql("CREATE TABLE alter_no_dict(stringField string,charField string) STORED BY 'carbondata' TBLPROPERTIES('DICTIONARY_EXCLUDE'='stringField,charField')")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_no_dict options('FILEHEADER'='stringField,charField')")
- sql("Alter table alter_no_dict drop columns(charField)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_no_dict options('FILEHEADER'='stringField')")
- sql("Alter table alter_no_dict compact 'major'")
- checkExistence(sql("show segments for table alter_no_dict"), true, "0Compacted")
- checkExistence(sql("show segments for table alter_no_dict"), true, "1Compacted")
- checkExistence(sql("show segments for table alter_no_dict"), true, "0.1Success")
+ sql("CREATE TABLE alter_no_dict(stringField STRING,charField STRING) STORED BY 'carbondata' TBLPROPERTIES('DICTIONARY_EXCLUDE'='stringField,charField')")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_no_dict OPTIONS('FILEHEADER'='stringField,charField')")
+ sql("ALTER TABLE alter_no_dict DROP COLUMNS(charField)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data7.csv' INTO TABLE alter_no_dict OPTIONS('FILEHEADER'='stringField')")
+ sql("ALTER TABLE alter_no_dict COMPACT 'major'")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE alter_no_dict"), true, "0Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE alter_no_dict"), true, "1Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE alter_no_dict"), true, "0.1Success")
sql("DROP TABLE IF EXISTS alter_no_dict")
}
test("no inverted index load and alter table") {
-
- sql("drop table if exists indexAlter")
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ sql("DROP TABLE IF EXISTS indexAlter")
sql(
"""
- CREATE TABLE IF NOT EXISTS indexAlter
- (ID Int, date Timestamp, country String,
- name String, phonetype String, serialname String)
+ CREATE TABLE indexAlter
+ (ID Int, date TIMESTAMP, country STRING,
+ name STRING, phonetype STRING, serialname STRING)
STORED BY 'org.apache.carbondata.format'
TBLPROPERTIES('NO_INVERTED_INDEX'='country,name,phonetype')
""")
@@ -387,12 +574,12 @@ class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
val testData2 = s"$resourcesPath/source.csv"
sql(s"""
- LOAD DATA LOCAL INPATH '$testData2' into table indexAlter
+ LOAD DATA LOCAL INPATH '$testData2' INTO TABLE indexAlter
""")
- sql("alter table indexAlter add columns(salary String) tblproperties('no_inverted_index'='salary')")
+ sql("ALTER TABLE indexAlter ADD COLUMNS(salary STRING) TBLPROPERTIES('no_inverted_index'='salary')")
sql(s"""
- LOAD DATA LOCAL INPATH '$testData2' into table indexAlter
+ LOAD DATA LOCAL INPATH '$testData2' INTO TABLE indexAlter
""")
checkAnswer(
sql("""
@@ -408,9 +595,16 @@ class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
override def afterAll {
sql("DROP TABLE IF EXISTS addcolumntest")
- sql("drop table if exists hivetable")
- sql("drop table if exists alter_sort_columns")
- sql("drop table if exists indexAlter")
+ sql("DROP TABLE IF EXISTS hivetable")
+ sql("DROP TABLE IF EXISTS alter_sort_columns")
+ sql("DROP TABLE IF EXISTS indexAlter")
+ sql("DROP TABLE IF EXISTS carbon_table")
+ sql("DROP TABLE IF EXISTS carbon_new")
+ sql("DROP TABLE IF EXISTS carbon_measure_is_null")
+ sql("DROP TABLE IF EXISTS carbon_dictionary_is_null")
+ sql("DROP TABLE IF EXISTS alter_decimal_filter")
+ sql("DROP TABLE IF EXISTS alter_dict")
+ sql("DROP TABLE IF EXISTS alter_no_dict")
sqlContext.setConf("carbon.enable.vector.reader", "false")
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf2a829d/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
index cbda10b..9f104ed 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/ChangeDataTypeTestCases.scala
@@ -26,88 +26,130 @@ import org.scalatest.BeforeAndAfterAll
class ChangeDataTypeTestCases extends Spark2QueryTest with BeforeAndAfterAll {
override def beforeAll {
- sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS changedatatypetest")
- sql("drop table if exists hivetable")
+ sql("DROP TABLE IF EXISTS hivetable")
}
test("test change datatype on existing column and load data, insert into hive table") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intfield bigint")
- sql(
- "CREATE TABLE hivetable(intField bigint,stringField string,charField string,timestampField " +
- "timestamp,decimalField decimal(6,2)) stored as parquet")
- sql("insert into table hivetable select * from changedatatypetest")
- afterAll
+ def test_change_column_load_insert() = {
+ beforeAll
+ sql(
+ "CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ sql("ALTER TABLE changedatatypetest CHANGE intField intfield BIGINT")
+ sql(
+ "CREATE TABLE hivetable(intField BIGINT,stringField STRING,charField STRING,timestampField "
+ + "TIMESTAMP,decimalField DECIMAL(6,2)) STORED AS PARQUET")
+ sql("INSERT INTO TABLE hivetable SELECT * FROM changedatatypetest")
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_change_column_load_insert()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_change_column_load_insert()
}
test("test datatype change and filter") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intfield bigint")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- checkAnswer(sql("select charField from changedatatypetest where intField > 99"),
- Seq(Row("abc"), Row("abc")))
- checkAnswer(sql("select charField from changedatatypetest where intField < 99"), Seq())
- checkAnswer(sql("select charField from changedatatypetest where intField = 100"),
- Seq(Row("abc"), Row("abc")))
- afterAll
+ def test_change_datatype_and_filter() = {
+ beforeAll
+ sql(
+ "CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ sql("ALTER TABLE changedatatypetest CHANGE intField intfield BIGINT")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ checkAnswer(sql("SELECT charField FROM changedatatypetest WHERE intField > 99"),
+ Seq(Row("abc"), Row("abc")))
+ checkAnswer(sql("SELECT charField FROM changedatatypetest WHERE intField < 99"), Seq())
+ checkAnswer(sql("SELECT charField FROM changedatatypetest WHERE intField = 100"),
+ Seq(Row("abc"), Row("abc")))
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_change_datatype_and_filter()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_change_datatype_and_filter()
}
test("test change int datatype and load data") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change intField intfield bigint")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- checkAnswer(sql("select sum(intField) from changedatatypetest"), Row(200))
- afterAll
+ def test_change_int_and_load() = {
+ beforeAll
+ sql(
+ "CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ sql("ALTER TABLE changedatatypetest CHANGE intField intfield BIGINT")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ checkAnswer(sql("SELECT SUM(intField) FROM changedatatypetest"), Row(200))
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_change_int_and_load()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_change_int_and_load()
}
test("test change decimal datatype and compaction") {
- beforeAll
- sql(
- "CREATE TABLE changedatatypetest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- sql("Alter table changedatatypetest change decimalField decimalField decimal(9,5)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
- s"changedatatypetest options('FILEHEADER'='intField,stringField,charField,timestampField," +
- s"decimalField')")
- checkAnswer(sql("select decimalField from changedatatypetest"),
- Seq(Row(new BigDecimal("21.23").setScale(5)), Row(new BigDecimal("21.23").setScale(5))))
- sql("alter table changedatatypetest compact 'major'")
- checkExistence(sql("show segments for table changedatatypetest"), true, "0Compacted")
- checkExistence(sql("show segments for table changedatatypetest"), true, "1Compacted")
- checkExistence(sql("show segments for table changedatatypetest"), true, "0.1Success")
- afterAll
+ def test_change_decimal_and_compaction() = {
+ beforeAll
+ sql(
+ "CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ sql("ALTER TABLE changedatatypetest CHANGE decimalField decimalField DECIMAL(9,5)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ checkAnswer(sql("SELECT decimalField FROM changedatatypetest"),
+ Seq(Row(new BigDecimal("21.23").setScale(5)), Row(new BigDecimal("21.23").setScale(5))))
+ sql("ALTER TABLE changedatatypetest COMPACT 'major'")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE changedatatypetest"), true, "0Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE changedatatypetest"), true, "1Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE changedatatypetest"), true, "0.1Success")
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_change_decimal_and_compaction()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_change_decimal_and_compaction()
+ }
+
+ test("test to change int datatype to long") {
+ def test_change_int_to_long() = {
+ beforeAll
+ sql(
+ "CREATE TABLE changedatatypetest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE " +
+ s"changedatatypetest OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,"
+ + s"decimalField')")
+ sql("ALTER TABLE changedatatypetest CHANGE intField intField LONG")
+ checkAnswer(sql("SELECT intField FROM changedatatypetest LIMIT 1"), Row(100))
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_change_int_to_long()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_change_int_to_long()
}
override def afterAll {
sql("DROP TABLE IF EXISTS changedatatypetest")
- sql("drop table if exists hivetable")
+ sql("DROP TABLE IF EXISTS hivetable")
sqlContext.setConf("carbon.enable.vector.reader", "false")
}
}
http://git-wip-us.apache.org/repos/asf/carbondata/blob/cf2a829d/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
index b757ad0..00e4a14 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/DropColumnTestCases.scala
@@ -28,61 +28,79 @@ import org.apache.carbondata.core.util.CarbonProperties
class DropColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
override def beforeAll {
- sqlContext.setConf("carbon.enable.vector.reader", "true")
sql("DROP TABLE IF EXISTS dropcolumntest")
- sql("drop table if exists hivetable")
+ sql("DROP TABLE IF EXISTS hivetable")
}
test("test drop column and insert into hive table") {
- beforeAll
- sql(
- "CREATE TABLE dropcolumntest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table dropcolumntest drop columns(charField)")
- sql(
- "CREATE TABLE hivetable(intField int,stringField string,timestampField timestamp," +
- "decimalField decimal(6,2)) stored as parquet")
- sql("insert into table hivetable select * from dropcolumntest")
- checkAnswer(sql("select * from hivetable"), sql("select * from dropcolumntest"))
- afterAll
+ def test_drop_and_insert() = {
+ beforeAll
+ sql(
+ "CREATE TABLE dropcolumntest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest"
+ + s" OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE dropcolumntest DROP COLUMNS(charField)")
+ sql(
+ "CREATE TABLE hivetable(intField INT,stringField STRING,timestampField TIMESTAMP," +
+ "decimalField DECIMAL(6,2)) STORED AS PARQUET")
+ sql("INSERT INTO TABLE hivetable SELECT * FROM dropcolumntest")
+ checkAnswer(sql("SELECT * FROM hivetable"), sql("SELECT * FROM dropcolumntest"))
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_drop_and_insert()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_drop_and_insert()
}
test("test drop column and load data") {
- beforeAll
- sql(
- "CREATE TABLE dropcolumntest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table dropcolumntest drop columns(charField)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
- checkAnswer(sql("select count(*) from dropcolumntest"), Row(2))
- afterAll
+ def test_drop_and_load() = {
+ beforeAll
+ sql(
+ "CREATE TABLE dropcolumntest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest"
+ + s" OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE dropcolumntest DROP COLUMNS(charField)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE dropcolumntest"
+ + s" OPTIONS('FILEHEADER'='intField,stringField,timestampField,decimalField')")
+ checkAnswer(sql("SELECT count(*) FROM dropcolumntest"), Row(2))
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_drop_and_load()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_drop_and_load()
+
}
test("test drop column and compaction") {
- beforeAll
- sql(
- "CREATE TABLE dropcolumntest(intField int,stringField string,charField string," +
- "timestampField timestamp,decimalField decimal(6,2)) STORED BY 'carbondata'")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
- sql("Alter table dropcolumntest drop columns(charField)")
- sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE dropcolumntest" +
- s" options('FILEHEADER'='intField,stringField,timestampField,decimalField')")
- sql("alter table dropcolumntest compact 'major'")
- checkExistence(sql("show segments for table dropcolumntest"), true, "0Compacted")
- checkExistence(sql("show segments for table dropcolumntest"), true, "1Compacted")
- checkExistence(sql("show segments for table dropcolumntest"), true, "0.1Success")
- afterAll
+ def test_drop_and_compaction() = {
+ beforeAll
+ sql(
+ "CREATE TABLE dropcolumntest(intField INT,stringField STRING,charField STRING," +
+ "timestampField TIMESTAMP,decimalField DECIMAL(6,2)) STORED BY 'carbondata'")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE dropcolumntest"
+ + s" OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
+ sql("ALTER TABLE dropcolumntest DROP COLUMNS(charField)")
+ sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data4.csv' INTO TABLE dropcolumntest"
+ + s" OPTIONS('FILEHEADER'='intField,stringField,timestampField,decimalField')")
+ sql("ALTER TABLE dropcolumntest COMPACT 'major'")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE dropcolumntest"), true, "0Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE dropcolumntest"), true, "1Compacted")
+ checkExistence(sql("SHOW SEGMENTS FOR TABLE dropcolumntest"), true, "0.1Success")
+ afterAll
+ }
+ sqlContext.setConf("carbon.enable.vector.reader", "true")
+ test_drop_and_compaction()
+ sqlContext.setConf("carbon.enable.vector.reader", "false")
+ test_drop_and_compaction()
}
override def afterAll {
sql("DROP TABLE IF EXISTS dropcolumntest")
- sql("drop table if exists hivetable")
+ sql("DROP TABLE IF EXISTS hivetable")
sqlContext.setConf("carbon.enable.vector.reader", "false")
}
}