You are viewing a plain text version of this content; the canonical link was not preserved in this plain-text conversion.
Posted to commits@hive.apache.org by sp...@apache.org on 2016/05/26 15:40:41 UTC
[19/66] [abbrv] hive git commit: HIVE-13736: View's input/output
formats are TEXT by default. (Yongzhi Chen, reviewed by Chaoyu Tang)
HIVE-13736: View's input/output formats are TEXT by default. (Yongzhi Chen, reviewed by Chaoyu Tang)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/c229f995
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/c229f995
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/c229f995
Branch: refs/heads/java8
Commit: c229f99580dba0eee83e253c5b7c4c596fc0d215
Parents: eff6e05
Author: Yongzhi Chen <yc...@apache.org>
Authored: Mon May 16 15:40:22 2016 -0400
Committer: Yongzhi Chen <yc...@apache.org>
Committed: Tue May 24 11:10:06 2016 -0400
----------------------------------------------------------------------
.../org/apache/hadoop/hive/ql/exec/DDLTask.java | 13 ++
.../hadoop/hive/ql/parse/SemanticAnalyzer.java | 5 +-
.../hadoop/hive/ql/plan/CreateViewDesc.java | 24 +++-
.../clientpositive/create_view_defaultformats.q | 14 ++
.../alter_view_as_select_with_partition.q.out | 4 +-
.../clientpositive/alter_view_as_select.q.out | 12 +-
.../clientpositive/create_or_replace_view.q.out | 20 +--
.../results/clientpositive/create_view.q.out | 56 ++++----
.../create_view_defaultformats.q.out | 128 +++++++++++++++++++
.../create_view_partitioned.q.out | 20 +--
.../clientpositive/create_view_translate.q.out | 8 +-
.../llap/selectDistinctStar.q.out | 8 +-
.../clientpositive/selectDistinctStar.q.out | 8 +-
.../clientpositive/tez/selectDistinctStar.q.out | 8 +-
14 files changed, 254 insertions(+), 74 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 1ebe963..717589a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -4159,6 +4159,12 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
oldview.getTTable().getParameters().putAll(crtView.getTblProps());
}
oldview.setPartCols(crtView.getPartCols());
+ if (crtView.getInputFormat() != null) {
+ oldview.setInputFormatClass(crtView.getInputFormat());
+ }
+ if (crtView.getOutputFormat() != null) {
+ oldview.setOutputFormatClass(crtView.getOutputFormat());
+ }
oldview.checkValidity(null);
try {
db.alterTable(crtView.getViewName(), oldview, null);
@@ -4186,6 +4192,13 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
tbl.setPartCols(crtView.getPartCols());
}
+ if (crtView.getInputFormat() != null) {
+ tbl.setInputFormatClass(crtView.getInputFormat());
+ }
+ if (crtView.getOutputFormat() != null) {
+ tbl.setOutputFormatClass(crtView.getOutputFormat());
+ }
+
db.createTable(tbl, crtView.getIfNotExists());
work.getOutputs().add(new WriteEntity(tbl, WriteEntity.WriteType.DDL_NO_LOCK));
}
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
index 8c93018..265dd7e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
@@ -11762,8 +11762,11 @@ public class SemanticAnalyzer extends BaseSemanticAnalyzer {
orReplace = true;
}
+ StorageFormat defaultFmt = new StorageFormat(conf);
+ defaultFmt.fillDefaultStorageFormat(false);
createVwDesc = new CreateViewDesc(
- dbDotTable, cols, comment, tblProps, partColNames,
+ dbDotTable, cols, comment, defaultFmt.getInputFormat(),
+ defaultFmt.getOutputFormat(), tblProps, partColNames,
ifNotExists, orReplace, isAlterViewAs);
unparseTranslator.enable();
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
index a5cf076..81c4f77 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
@@ -36,6 +36,8 @@ public class CreateViewDesc extends DDLDesc implements Serializable {
private static final long serialVersionUID = 1L;
private String viewName;
+ private String inputFormat;
+ private String outputFormat;
private String originalText;
private String expandedText;
private List<FieldSchema> schema;
@@ -54,12 +56,15 @@ public class CreateViewDesc extends DDLDesc implements Serializable {
}
public CreateViewDesc(String viewName, List<FieldSchema> schema,
- String comment, Map<String, String> tblProps,
+ String comment, String inputFormat,
+ String outputFormat, Map<String, String> tblProps,
List<String> partColNames, boolean ifNotExists,
boolean orReplace, boolean isAlterViewAs) {
this.viewName = viewName;
this.schema = schema;
this.comment = comment;
+ this.inputFormat = inputFormat;
+ this.outputFormat = outputFormat;
this.tblProps = tblProps;
this.partColNames = partColNames;
this.ifNotExists = ifNotExists;
@@ -172,4 +177,21 @@ public class CreateViewDesc extends DDLDesc implements Serializable {
public void setIsAlterViewAs(boolean isAlterViewAs) {
this.isAlterViewAs = isAlterViewAs;
}
+
+ public String getInputFormat() {
+ return inputFormat;
+ }
+
+ public void setInputFormat(String inputFormat) {
+ this.inputFormat = inputFormat;
+ }
+
+ public String getOutputFormat() {
+ return outputFormat;
+ }
+
+ public void setOutputFormat(String outputFormat) {
+ this.outputFormat = outputFormat;
+ }
+
}
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/queries/clientpositive/create_view_defaultformats.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/create_view_defaultformats.q b/ql/src/test/queries/clientpositive/create_view_defaultformats.q
new file mode 100644
index 0000000..66fa141
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/create_view_defaultformats.q
@@ -0,0 +1,14 @@
+drop view if exists sfsrc;
+drop view if exists rcsrc;
+set hive.default.fileformat=SequenceFile;
+create view sfsrc as select * from src;
+set hive.default.fileformat=RcFile;
+create view rcsrc as select * from src;
+describe formatted sfsrc;
+describe formatted rcsrc;
+select * from sfsrc where key = 100 limit 1;
+select * from rcsrc where key = 100 limit 1;
+drop view sfsrc;
+drop view rcsrc;
+set hive.default.fileformat=TextFile;
+
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out b/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
index 1cbfd75..9b84227 100644
--- a/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
+++ b/ql/src/test/results/clientnegative/alter_view_as_select_with_partition.q.out
@@ -55,8 +55,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/alter_view_as_select.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/alter_view_as_select.q.out b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
index 3666221..2d82395 100644
--- a/ql/src/test/results/clientpositive/alter_view_as_select.q.out
+++ b/ql/src/test/results/clientpositive/alter_view_as_select.q.out
@@ -37,8 +37,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -77,8 +77,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -126,8 +126,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/create_or_replace_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_or_replace_view.q.out b/ql/src/test/results/clientpositive/create_or_replace_view.q.out
index dd5bf13..f6f26d2 100644
--- a/ql/src/test/results/clientpositive/create_or_replace_view.q.out
+++ b/ql/src/test/results/clientpositive/create_or_replace_view.q.out
@@ -37,8 +37,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -127,8 +127,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -220,8 +220,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -290,8 +290,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -381,8 +381,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/create_view.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view.q.out b/ql/src/test/results/clientpositive/create_view.q.out
index 809f701..d9c1e11 100644
--- a/ql/src/test/results/clientpositive/create_view.q.out
+++ b/ql/src/test/results/clientpositive/create_view.q.out
@@ -257,8 +257,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -306,8 +306,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -353,8 +353,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -402,8 +402,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -764,8 +764,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -844,8 +844,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -922,8 +922,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -990,8 +990,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -1066,8 +1066,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -1138,8 +1138,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -1223,8 +1223,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -1344,8 +1344,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -1460,8 +1460,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -1545,8 +1545,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/create_view_defaultformats.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view_defaultformats.q.out b/ql/src/test/results/clientpositive/create_view_defaultformats.q.out
new file mode 100644
index 0000000..dbc4a20
--- /dev/null
+++ b/ql/src/test/results/clientpositive/create_view_defaultformats.q.out
@@ -0,0 +1,128 @@
+PREHOOK: query: drop view if exists sfsrc
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists sfsrc
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: drop view if exists rcsrc
+PREHOOK: type: DROPVIEW
+POSTHOOK: query: drop view if exists rcsrc
+POSTHOOK: type: DROPVIEW
+PREHOOK: query: create view sfsrc as select * from src
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@sfsrc
+POSTHOOK: query: create view sfsrc as select * from src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@sfsrc
+PREHOOK: query: create view rcsrc as select * from src
+PREHOOK: type: CREATEVIEW
+PREHOOK: Input: default@src
+PREHOOK: Output: database:default
+PREHOOK: Output: default@rcsrc
+POSTHOOK: query: create view rcsrc as select * from src
+POSTHOOK: type: CREATEVIEW
+POSTHOOK: Input: default@src
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@rcsrc
+PREHOOK: query: describe formatted sfsrc
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@sfsrc
+POSTHOOK: query: describe formatted sfsrc
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@sfsrc
+# col_name data_type comment
+
+key string
+value string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+Table Type: VIRTUAL_VIEW
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: null
+InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+
+# View Information
+View Original Text: select * from src
+View Expanded Text: select `src`.`key`, `src`.`value` from `default`.`src`
+PREHOOK: query: describe formatted rcsrc
+PREHOOK: type: DESCTABLE
+PREHOOK: Input: default@rcsrc
+POSTHOOK: query: describe formatted rcsrc
+POSTHOOK: type: DESCTABLE
+POSTHOOK: Input: default@rcsrc
+# col_name data_type comment
+
+key string
+value string
+
+# Detailed Table Information
+Database: default
+#### A masked pattern was here ####
+Retention: 0
+Table Type: VIRTUAL_VIEW
+Table Parameters:
+#### A masked pattern was here ####
+
+# Storage Information
+SerDe Library: null
+InputFormat: org.apache.hadoop.hive.ql.io.RCFileInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.RCFileOutputFormat
+Compressed: No
+Num Buckets: -1
+Bucket Columns: []
+Sort Columns: []
+
+# View Information
+View Original Text: select * from src
+View Expanded Text: select `src`.`key`, `src`.`value` from `default`.`src`
+PREHOOK: query: select * from sfsrc where key = 100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@sfsrc
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from sfsrc where key = 100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@sfsrc
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+100 val_100
+PREHOOK: query: select * from rcsrc where key = 100 limit 1
+PREHOOK: type: QUERY
+PREHOOK: Input: default@rcsrc
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: select * from rcsrc where key = 100 limit 1
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@rcsrc
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+100 val_100
+PREHOOK: query: drop view sfsrc
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@sfsrc
+PREHOOK: Output: default@sfsrc
+POSTHOOK: query: drop view sfsrc
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@sfsrc
+POSTHOOK: Output: default@sfsrc
+PREHOOK: query: drop view rcsrc
+PREHOOK: type: DROPVIEW
+PREHOOK: Input: default@rcsrc
+PREHOOK: Output: default@rcsrc
+POSTHOOK: query: drop view rcsrc
+POSTHOOK: type: DROPVIEW
+POSTHOOK: Input: default@rcsrc
+POSTHOOK: Output: default@rcsrc
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/create_view_partitioned.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view_partitioned.q.out b/ql/src/test/results/clientpositive/create_view_partitioned.q.out
index caa2251..15d777a 100644
--- a/ql/src/test/results/clientpositive/create_view_partitioned.q.out
+++ b/ql/src/test/results/clientpositive/create_view_partitioned.q.out
@@ -78,8 +78,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -175,8 +175,8 @@ POSTHOOK: type: SHOW_TABLESTATUS
tableName:vp1
#### A masked pattern was here ####
location:null
-inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat
-outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key}
partitioned:true
partitionColumns:struct partition_columns { string value}
@@ -188,8 +188,8 @@ POSTHOOK: type: SHOW_TABLESTATUS
tableName:vp1
#### A masked pattern was here ####
location:null
-inputformat:org.apache.hadoop.mapred.SequenceFileInputFormat
-outputformat:org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+inputformat:org.apache.hadoop.mapred.TextInputFormat
+outputformat:org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
columns:struct columns { string key}
partitioned:true
partitionColumns:struct partition_columns { string value}
@@ -285,8 +285,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -398,8 +398,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/create_view_translate.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/create_view_translate.q.out b/ql/src/test/results/clientpositive/create_view_translate.q.out
index 886a01b..2789f8f 100644
--- a/ql/src/test/results/clientpositive/create_view_translate.q.out
+++ b/ql/src/test/results/clientpositive/create_view_translate.q.out
@@ -36,8 +36,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -81,8 +81,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
index 5594a0e..fbc8567 100644
--- a/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/llap/selectDistinctStar.q.out
@@ -1397,8 +1397,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -3850,8 +3850,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/selectDistinctStar.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/selectDistinctStar.q.out
index a95e945..d54fa68 100644
--- a/ql/src/test/results/clientpositive/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/selectDistinctStar.q.out
@@ -1370,8 +1370,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -3796,8 +3796,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
http://git-wip-us.apache.org/repos/asf/hive/blob/c229f995/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out b/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
index 8c96260..050bd79 100644
--- a/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
+++ b/ql/src/test/results/clientpositive/tez/selectDistinctStar.q.out
@@ -1393,8 +1393,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []
@@ -3842,8 +3842,8 @@ Table Parameters:
# Storage Information
SerDe Library: null
-InputFormat: org.apache.hadoop.mapred.SequenceFileInputFormat
-OutputFormat: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+InputFormat: org.apache.hadoop.mapred.TextInputFormat
+OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
Compressed: No
Num Buckets: -1
Bucket Columns: []