Posted to commits@drill.apache.org by br...@apache.org on 2015/06/20 02:07:04 UTC

[1/5] drill git commit: DRILL-1820

Repository: drill
Updated Branches:
  refs/heads/gh-pages d476ce583 -> 3d1c00554


DRILL-1820

consistent acronym def

query > select statement

new config spotfire server file

minor edits

CTAS partitioning


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/00efc0f1
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/00efc0f1
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/00efc0f1

Branch: refs/heads/gh-pages
Commit: 00efc0f1935dee9b8020f9fc778aeb90c7905ded
Parents: cfd4573
Author: Kristine Hahn <kh...@maprtech.com>
Authored: Thu Jun 18 17:36:47 2015 -0700
Committer: Kristine Hahn <kh...@maprtech.com>
Committed: Fri Jun 19 14:43:21 2015 -0700

----------------------------------------------------------------------
 _data/docs.json                                 | 232 ++++++++++++++++---
 .../040-parquet-format.md                       |   2 +-
 .../010-compiling-drill-from-source.md          |   9 +-
 .../020-drill-patch-review-tool.md              |   2 +-
 .../050-configuring-spotfire-server.md          | 123 +++++++++-
 .../performance-tuning/020-partition-pruning.md |  27 ++-
 _docs/query-data/010-query-data-introduction.md |  38 +--
 _docs/sql-reference/090-sql-extensions.md       |   2 +-
 .../data-types/010-supported-data-types.md      |   2 +-
 .../sql-commands/030-create-table-as.md         |   4 +-
 .../sql-commands/035-partition-by-clause.md     |  37 +++
 .../sql-functions/020-data-type-conversion.md   |   2 +-
 .../030-analyzing-the-yelp-academic-dataset.md  |   2 +-
 13 files changed, 409 insertions(+), 73 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_data/docs.json
----------------------------------------------------------------------
diff --git a/_data/docs.json b/_data/docs.json
index 6989a98..ddd3a4e 100644
--- a/_data/docs.json
+++ b/_data/docs.json
@@ -601,8 +601,8 @@
                 }
             ], 
             "children": [], 
-            "next_title": "CREATE VIEW", 
-            "next_url": "/docs/create-view/", 
+            "next_title": "PARTITION BY Clause", 
+            "next_url": "/docs/partition-by-clause/", 
             "parent": "SQL Commands", 
             "previous_title": "ALTER SYSTEM", 
             "previous_url": "/docs/alter-system/", 
@@ -625,8 +625,8 @@
             "next_title": "DROP VIEW", 
             "next_url": "/docs/drop-view/", 
             "parent": "SQL Commands", 
-            "previous_title": "CREATE TABLE AS (CTAS)", 
-            "previous_url": "/docs/create-table-as-ctas/", 
+            "previous_title": "PARTITION BY Clause", 
+            "previous_url": "/docs/partition-by-clause/", 
             "relative_path": "_docs/sql-reference/sql-commands/050-create-view.md", 
             "title": "CREATE VIEW", 
             "url": "/docs/create-view/"
@@ -1373,6 +1373,27 @@
             "title": "Configuring Resources for a Shared Drillbit", 
             "url": "/docs/configuring-resources-for-a-shared-drillbit/"
         }, 
+        "Configuring Tibco Spotfire Server with Drill": {
+            "breadcrumbs": [
+                {
+                    "title": "Using Drill with BI Tools", 
+                    "url": "/docs/using-drill-with-bi-tools/"
+                }, 
+                {
+                    "title": "ODBC/JDBC Interfaces", 
+                    "url": "/docs/odbc-jdbc-interfaces/"
+                }
+            ], 
+            "children": [], 
+            "next_title": "Using Apache Drill with Tableau 9 Desktop", 
+            "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
+            "parent": "Using Drill with BI Tools", 
+            "previous_title": "Using Tibco Spotfire with Drill", 
+            "previous_url": "/docs/using-tibco-spotfire-with-drill/", 
+            "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md", 
+            "title": "Configuring Tibco Spotfire Server with Drill", 
+            "url": "/docs/configuring-tibco-spotfire-server-with-drill/"
+        }, 
         "Configuring User Authentication": {
             "breadcrumbs": [
                 {
@@ -5352,8 +5373,8 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "Using Apache Drill with Tableau 9 Desktop", 
-                            "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
+                            "next_title": "Configuring Tibco Spotfire Server with Drill", 
+                            "next_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
                             "parent": "Using Drill with BI Tools", 
                             "previous_title": "Using MicroStrategy Analytics with Apache Drill", 
                             "previous_url": "/docs/using-microstrategy-analytics-with-apache-drill/", 
@@ -5373,11 +5394,32 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "Using Apache Drill with Tableau 9 Server", 
-                            "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                            "next_title": "Using Apache Drill with Tableau 9 Desktop", 
+                            "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
                             "parent": "Using Drill with BI Tools", 
                             "previous_title": "Using Tibco Spotfire with Drill", 
                             "previous_url": "/docs/using-tibco-spotfire-with-drill/", 
+                            "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md", 
+                            "title": "Configuring Tibco Spotfire Server with Drill", 
+                            "url": "/docs/configuring-tibco-spotfire-server-with-drill/"
+                        }, 
+                        {
+                            "breadcrumbs": [
+                                {
+                                    "title": "Using Drill with BI Tools", 
+                                    "url": "/docs/using-drill-with-bi-tools/"
+                                }, 
+                                {
+                                    "title": "ODBC/JDBC Interfaces", 
+                                    "url": "/docs/odbc-jdbc-interfaces/"
+                                }
+                            ], 
+                            "children": [], 
+                            "next_title": "Using Apache Drill with Tableau 9 Server", 
+                            "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                            "parent": "Using Drill with BI Tools", 
+                            "previous_title": "Configuring Tibco Spotfire Server with Drill", 
+                            "previous_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
                             "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/060-using-apache-drill-with-tableau-9-desktop.md", 
                             "title": "Using Apache Drill with Tableau 9 Desktop", 
                             "url": "/docs/using-apache-drill-with-tableau-9-desktop/"
@@ -5503,6 +5545,27 @@
             "title": "Operators", 
             "url": "/docs/operators/"
         }, 
+        "PARTITION BY Clause": {
+            "breadcrumbs": [
+                {
+                    "title": "SQL Commands", 
+                    "url": "/docs/sql-commands/"
+                }, 
+                {
+                    "title": "SQL Reference", 
+                    "url": "/docs/sql-reference/"
+                }
+            ], 
+            "children": [], 
+            "next_title": "CREATE VIEW", 
+            "next_url": "/docs/create-view/", 
+            "parent": "SQL Commands", 
+            "previous_title": "CREATE TABLE AS (CTAS)", 
+            "previous_url": "/docs/create-table-as-ctas/", 
+            "relative_path": "_docs/sql-reference/sql-commands/035-partition-by-clause.md", 
+            "title": "PARTITION BY Clause", 
+            "url": "/docs/partition-by-clause/"
+        }, 
         "Parquet Format": {
             "breadcrumbs": [
                 {
@@ -7738,8 +7801,8 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "CREATE VIEW", 
-                    "next_url": "/docs/create-view/", 
+                    "next_title": "PARTITION BY Clause", 
+                    "next_url": "/docs/partition-by-clause/", 
                     "parent": "SQL Commands", 
                     "previous_title": "ALTER SYSTEM", 
                     "previous_url": "/docs/alter-system/", 
@@ -7759,11 +7822,32 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "DROP VIEW", 
-                    "next_url": "/docs/drop-view/", 
+                    "next_title": "CREATE VIEW", 
+                    "next_url": "/docs/create-view/", 
                     "parent": "SQL Commands", 
                     "previous_title": "CREATE TABLE AS (CTAS)", 
                     "previous_url": "/docs/create-table-as-ctas/", 
+                    "relative_path": "_docs/sql-reference/sql-commands/035-partition-by-clause.md", 
+                    "title": "PARTITION BY Clause", 
+                    "url": "/docs/partition-by-clause/"
+                }, 
+                {
+                    "breadcrumbs": [
+                        {
+                            "title": "SQL Commands", 
+                            "url": "/docs/sql-commands/"
+                        }, 
+                        {
+                            "title": "SQL Reference", 
+                            "url": "/docs/sql-reference/"
+                        }
+                    ], 
+                    "children": [], 
+                    "next_title": "DROP VIEW", 
+                    "next_url": "/docs/drop-view/", 
+                    "parent": "SQL Commands", 
+                    "previous_title": "PARTITION BY Clause", 
+                    "previous_url": "/docs/partition-by-clause/", 
                     "relative_path": "_docs/sql-reference/sql-commands/050-create-view.md", 
                     "title": "CREATE VIEW", 
                     "url": "/docs/create-view/"
@@ -8900,8 +8984,8 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "CREATE VIEW", 
-                            "next_url": "/docs/create-view/", 
+                            "next_title": "PARTITION BY Clause", 
+                            "next_url": "/docs/partition-by-clause/", 
                             "parent": "SQL Commands", 
                             "previous_title": "ALTER SYSTEM", 
                             "previous_url": "/docs/alter-system/", 
@@ -8921,11 +9005,32 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "DROP VIEW", 
-                            "next_url": "/docs/drop-view/", 
+                            "next_title": "CREATE VIEW", 
+                            "next_url": "/docs/create-view/", 
                             "parent": "SQL Commands", 
                             "previous_title": "CREATE TABLE AS (CTAS)", 
                             "previous_url": "/docs/create-table-as-ctas/", 
+                            "relative_path": "_docs/sql-reference/sql-commands/035-partition-by-clause.md", 
+                            "title": "PARTITION BY Clause", 
+                            "url": "/docs/partition-by-clause/"
+                        }, 
+                        {
+                            "breadcrumbs": [
+                                {
+                                    "title": "SQL Commands", 
+                                    "url": "/docs/sql-commands/"
+                                }, 
+                                {
+                                    "title": "SQL Reference", 
+                                    "url": "/docs/sql-reference/"
+                                }
+                            ], 
+                            "children": [], 
+                            "next_title": "DROP VIEW", 
+                            "next_url": "/docs/drop-view/", 
+                            "parent": "SQL Commands", 
+                            "previous_title": "PARTITION BY Clause", 
+                            "previous_url": "/docs/partition-by-clause/", 
                             "relative_path": "_docs/sql-reference/sql-commands/050-create-view.md", 
                             "title": "CREATE VIEW", 
                             "url": "/docs/create-view/"
@@ -10328,8 +10433,8 @@
             "next_title": "Using Apache Drill with Tableau 9 Server", 
             "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
             "parent": "Using Drill with BI Tools", 
-            "previous_title": "Using Tibco Spotfire with Drill", 
-            "previous_url": "/docs/using-tibco-spotfire-with-drill/", 
+            "previous_title": "Configuring Tibco Spotfire Server with Drill", 
+            "previous_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
             "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/060-using-apache-drill-with-tableau-9-desktop.md", 
             "title": "Using Apache Drill with Tableau 9 Desktop", 
             "url": "/docs/using-apache-drill-with-tableau-9-desktop/"
@@ -10536,8 +10641,8 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Using Apache Drill with Tableau 9 Desktop", 
-                    "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
+                    "next_title": "Configuring Tibco Spotfire Server with Drill", 
+                    "next_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
                     "parent": "Using Drill with BI Tools", 
                     "previous_title": "Using MicroStrategy Analytics with Apache Drill", 
                     "previous_url": "/docs/using-microstrategy-analytics-with-apache-drill/", 
@@ -10557,11 +10662,32 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Using Apache Drill with Tableau 9 Server", 
-                    "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                    "next_title": "Using Apache Drill with Tableau 9 Desktop", 
+                    "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
                     "parent": "Using Drill with BI Tools", 
                     "previous_title": "Using Tibco Spotfire with Drill", 
                     "previous_url": "/docs/using-tibco-spotfire-with-drill/", 
+                    "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md", 
+                    "title": "Configuring Tibco Spotfire Server with Drill", 
+                    "url": "/docs/configuring-tibco-spotfire-server-with-drill/"
+                }, 
+                {
+                    "breadcrumbs": [
+                        {
+                            "title": "Using Drill with BI Tools", 
+                            "url": "/docs/using-drill-with-bi-tools/"
+                        }, 
+                        {
+                            "title": "ODBC/JDBC Interfaces", 
+                            "url": "/docs/odbc-jdbc-interfaces/"
+                        }
+                    ], 
+                    "children": [], 
+                    "next_title": "Using Apache Drill with Tableau 9 Server", 
+                    "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                    "parent": "Using Drill with BI Tools", 
+                    "previous_title": "Configuring Tibco Spotfire Server with Drill", 
+                    "previous_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
                     "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/060-using-apache-drill-with-tableau-9-desktop.md", 
                     "title": "Using Apache Drill with Tableau 9 Desktop", 
                     "url": "/docs/using-apache-drill-with-tableau-9-desktop/"
@@ -10710,8 +10836,8 @@
                 }
             ], 
             "children": [], 
-            "next_title": "Using Apache Drill with Tableau 9 Desktop", 
-            "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
+            "next_title": "Configuring Tibco Spotfire Server with Drill", 
+            "next_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
             "parent": "Using Drill with BI Tools", 
             "previous_title": "Using MicroStrategy Analytics with Apache Drill", 
             "previous_url": "/docs/using-microstrategy-analytics-with-apache-drill/", 
@@ -12464,8 +12590,8 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "Using Apache Drill with Tableau 9 Desktop", 
-                            "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
+                            "next_title": "Configuring Tibco Spotfire Server with Drill", 
+                            "next_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
                             "parent": "Using Drill with BI Tools", 
                             "previous_title": "Using MicroStrategy Analytics with Apache Drill", 
                             "previous_url": "/docs/using-microstrategy-analytics-with-apache-drill/", 
@@ -12485,11 +12611,32 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "Using Apache Drill with Tableau 9 Server", 
-                            "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                            "next_title": "Using Apache Drill with Tableau 9 Desktop", 
+                            "next_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
                             "parent": "Using Drill with BI Tools", 
                             "previous_title": "Using Tibco Spotfire with Drill", 
                             "previous_url": "/docs/using-tibco-spotfire-with-drill/", 
+                            "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md", 
+                            "title": "Configuring Tibco Spotfire Server with Drill", 
+                            "url": "/docs/configuring-tibco-spotfire-server-with-drill/"
+                        }, 
+                        {
+                            "breadcrumbs": [
+                                {
+                                    "title": "Using Drill with BI Tools", 
+                                    "url": "/docs/using-drill-with-bi-tools/"
+                                }, 
+                                {
+                                    "title": "ODBC/JDBC Interfaces", 
+                                    "url": "/docs/odbc-jdbc-interfaces/"
+                                }
+                            ], 
+                            "children": [], 
+                            "next_title": "Using Apache Drill with Tableau 9 Server", 
+                            "next_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                            "parent": "Using Drill with BI Tools", 
+                            "previous_title": "Configuring Tibco Spotfire Server with Drill", 
+                            "previous_url": "/docs/configuring-tibco-spotfire-server-with-drill/", 
                             "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/060-using-apache-drill-with-tableau-9-desktop.md", 
                             "title": "Using Apache Drill with Tableau 9 Desktop", 
                             "url": "/docs/using-apache-drill-with-tableau-9-desktop/"
@@ -13908,8 +14055,8 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "CREATE VIEW", 
-                            "next_url": "/docs/create-view/", 
+                            "next_title": "PARTITION BY Clause", 
+                            "next_url": "/docs/partition-by-clause/", 
                             "parent": "SQL Commands", 
                             "previous_title": "ALTER SYSTEM", 
                             "previous_url": "/docs/alter-system/", 
@@ -13929,11 +14076,32 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "DROP VIEW", 
-                            "next_url": "/docs/drop-view/", 
+                            "next_title": "CREATE VIEW", 
+                            "next_url": "/docs/create-view/", 
                             "parent": "SQL Commands", 
                             "previous_title": "CREATE TABLE AS (CTAS)", 
                             "previous_url": "/docs/create-table-as-ctas/", 
+                            "relative_path": "_docs/sql-reference/sql-commands/035-partition-by-clause.md", 
+                            "title": "PARTITION BY Clause", 
+                            "url": "/docs/partition-by-clause/"
+                        }, 
+                        {
+                            "breadcrumbs": [
+                                {
+                                    "title": "SQL Commands", 
+                                    "url": "/docs/sql-commands/"
+                                }, 
+                                {
+                                    "title": "SQL Reference", 
+                                    "url": "/docs/sql-reference/"
+                                }
+                            ], 
+                            "children": [], 
+                            "next_title": "DROP VIEW", 
+                            "next_url": "/docs/drop-view/", 
+                            "parent": "SQL Commands", 
+                            "previous_title": "PARTITION BY Clause", 
+                            "previous_url": "/docs/partition-by-clause/", 
                             "relative_path": "_docs/sql-reference/sql-commands/050-create-view.md", 
                             "title": "CREATE VIEW", 
                             "url": "/docs/create-view/"

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/data-sources-and-file-formats/040-parquet-format.md
----------------------------------------------------------------------
diff --git a/_docs/data-sources-and-file-formats/040-parquet-format.md b/_docs/data-sources-and-file-formats/040-parquet-format.md
index 5cfc83f..c9070b7 100644
--- a/_docs/data-sources-and-file-formats/040-parquet-format.md
+++ b/_docs/data-sources-and-file-formats/040-parquet-format.md
@@ -21,7 +21,7 @@ Apache Drill includes the following support for Parquet:
 When a read of Parquet data occurs, Drill loads only the necessary columns of data, which reduces I/O. Reading only a small piece of the Parquet data from a data file or table, Drill can examine and analyze all values for a column across multiple files. You can create a Drill table from one format and store the data in another format, including Parquet.
 
 ## Writing Parquet Files
-CREATE TABLE AS SELECT (CTAS) can use any data source provided by the storage plugin. To write Parquet data using the CTAS command, set the session store.format option as shown in the next section. Alternatively, configure the storage plugin to point to the directory containing the Parquet files.
+CREATE TABLE AS (CTAS) can use any data source provided by the storage plugin. To write Parquet data using the CTAS command, set the session store.format option as shown in the next section. Alternatively, configure the storage plugin to point to the directory containing the Parquet files.
 
 Although the data resides in a single table, Parquet output generally consists of multiple files that resemble MapReduce output having numbered file names,  such as 0_0_0.parquet in a directory.
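
A minimal sketch of the workflow described above: set the session option, then write Parquet with CTAS. The table name and source path here are hypothetical, and `dfs.tmp` assumes a writable workspace.

    ALTER SESSION SET `store.format` = 'parquet';
    CREATE TABLE dfs.tmp.`sample_parquet` AS
    SELECT * FROM dfs.`/data/sample.json`;  -- hypothetical source path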
 

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/developer-information/develop-drill/010-compiling-drill-from-source.md
----------------------------------------------------------------------
diff --git a/_docs/developer-information/develop-drill/010-compiling-drill-from-source.md b/_docs/developer-information/develop-drill/010-compiling-drill-from-source.md
index 248ab81..a10a842 100644
--- a/_docs/developer-information/develop-drill/010-compiling-drill-from-source.md
+++ b/_docs/developer-information/develop-drill/010-compiling-drill-from-source.md
@@ -24,15 +24,10 @@ Maven and JDK installed:
 
 ## 2\. Compile the Code
 
-    cd incubator-drill
+    cd drill
     mvn clean install -DskipTests
 
-## 3\. Explode the Tarball in the Installation Directory
-
-    mkdir ~/compiled-drill
-    tar xvzf distribution/target/*.tar.gz --strip=1 -C ~/compiled-drill
-
-Now that you have Drill installed, you can connect to Drill and query sample
+The tarball appears in distribution/target. Move the tarball to a directory of your choice, unpack it there, and then you can connect to Drill and query sample
 data or you can connect Drill to your data sources.
 
   * To connect Drill to your data sources, refer to [Connect to Data Sources]({{ site.baseurl }}/docs/connect-a-data-source-introduction) for instructions.

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/developer-information/develop-drill/020-drill-patch-review-tool.md
----------------------------------------------------------------------
diff --git a/_docs/developer-information/develop-drill/020-drill-patch-review-tool.md b/_docs/developer-information/develop-drill/020-drill-patch-review-tool.md
index 5144d36..8edf2bf 100644
--- a/_docs/developer-information/develop-drill/020-drill-patch-review-tool.md
+++ b/_docs/developer-information/develop-drill/020-drill-patch-review-tool.md
@@ -128,7 +128,7 @@ review board. So you need to configure an override to use the non-http url.
 You can do this by adding a config file like this:
 
 	jkreps$ cat ~/.reviewboardrc
-	REPOSITORY = 'git://git.apache.org/incubator-drill.git'
+	REPOSITORY = 'git://git.apache.org/drill.git'
 	TARGET_GROUPS = 'drill-git'
	GUESS_FIELDS = True
 

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md
----------------------------------------------------------------------
diff --git a/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md b/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md
index 436776e..c68b07d 100644
--- a/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md
+++ b/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/050-configuring-spotfire-server.md
@@ -1 +1,122 @@
----
title: "Configuring Tibco Spotfire Server with Drill"
parent: "Using Drill with BI Tools"
---
This document describes how to configure Tibco Spotfire Server (TSS) to integrate with Apache Drill and explore multiple data formats instantly on Hadoop. Users can combine these powerful platforms to rapidly gain analytical access to a wide variety of data types. 

Complete the following steps to configure and use Apache Drill with TSS: 

1. Install the Drill JDBC driver with TSS.
2. Configure the Drill Data Source Template in TSS with the TSS configuration tool.
3. Configure Drill data sources with Tibco Spotfire Desktop and Information Designer.
4. Query and analyze various data formats with Tibco Spotfire and Drill.


----------


### Step 1: Install and Configure the Drill JDBC Driver 


Drill provides standard JDBC connectivity, making it easy to integrate data exploration capabilities on complex, schema-less data sets. Tibco Spotfire Server (TSS) requires Drill 1.0 or later, which incudes the JDBC driver. The JDBC driver is bundled with the Drill configuration files, and it is recommended that you use the JDBC driver that is shipped with the specific Drill version.

For general instructions to install the Drill JDBC driver, see [Using JDBC](http://drill.apache.org/docs/using-jdbc/).
Complete the following steps to install and configure the JDBC driver for TSS:

1. Locate the JDBC driver in the Drill installation directory:  
   `<drill-home>/jars/jdbc-driver/drill-jdbc-all-<drill-version>.jar`  
   For example, on a MapR cluster:  
   `/opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/drill-jdbc-all-1.0.0-SNAPSHOT.jar`

2. Locate the TSS library directory and copy the JDBC driver file to that directory: 
   `<TSS-home-directory>/tomcat/lib`  
   For example, on a Linux server:  
   `/usr/local/bin/tibco/tss/6.0.3/tomcat/lib`  
   For example, on a Windows server:  
   `C:\Program Files\apache-tomcat\lib`

3. Restart TSS to load the JDBC driver.
4. Verify that the TSS system can resolve the hostnames of the ZooKeeper nodes for the Drill cluster. You can do this by validating that DNS is properly configured for the TSS system and all the ZooKeeper nodes. Alternatively, you can add the hostnames and IP addresses of the ZooKeeper nodes to the TSS system hosts file.  
   For Linux systems, the hosts file is located here: 
   `/etc/hosts`  
   For Windows systems, the hosts file is located here: 
   `%WINDIR%\system32\drivers\etc\hosts`

----------

### Step 2: Configure the Drill Data Source Template in TSS

The Drill Data Source template can now be configured with the TSS Configuration Tool. The Windows-based TSS Configuration Tool is recommended. If TSS is installed on a Linux system, you also need to install TSS on a small Windows-based system so you can utilize the Configuration Tool. In this case, it is also recommended that you install the Drill JDBC driver on the TSS Windows system.

1. Click **Start > All Programs > TIBCO Spotfire Server > Configure TIBCO Spotfire Server**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-start.png)
2. Enter the Configuration Tool password that was specified when TSS was initially installed.
3. Once the Configuration Tool has connected to TSS, click the **Configuration** tab, then **Data Source Templates**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-configtab.png)
4. In the Data Source Templates window, click the **New** button at the bottom of the window. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-new.png)
5. Provide a name for the data source template, then copy the following XML template into the **Data Source Template** box. When complete, click **OK**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-template.png)
6. The new entry will now be available in the data source template. Check the box next to the new entry, then click **Save Configuration**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-saveconfig.png)
7. Select Database as the destination and click Next. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-saveconfig2.png) 
8. Add a comment to the updated configuration and click **Finish**. 
9. A response window is displayed to state that the configuration was successfully uploaded to TSS. Click **OK**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-importconfig.png)
10. Restart TSS to enable it to use the Drill data source template.
   
#### XML Template

Make sure that you enter the correct ZooKeeper node name instead of `<zk-node>`, as well as the correct Drill cluster name instead of `<drill-cluster-name>` in the example below. This is just a template that will appear whenever a data source is configured. The hostnames of ZooKeeper nodes and the Drill cluster name can be found in the `$DRILL_HOME/conf/drill-override.conf` file on any of the Drill nodes in the cluster.
     
    <jdbc-type-settings>
    <type-name>drill</type-name>
    <driver>org.apache.drill.jdbc.Driver</driver> 
    <connection-url-pattern>jdbc:drill:zk=<zk-node>:5181/drill/<drill-cluster-name>-drillbits</connection-url-pattern> 
    <ping-command>SELECT 1 FROM sys.version</ping-command>
    <supports-catalogs>true</supports-catalogs>
    <supports-schemas>true</supports-schemas>
    <supports-procedures>false</supports-procedures>
    <table-expression-pattern>[$$schema$$.]$$table$$</table-expression-pattern>
 
    <column-name-pattern>`$$name$$`</column-name-pattern>
    <table-name-pattern>`$$name$$`</table-name-pattern>
    <schema-name-pattern>`$$name$$`</schema-name-pattern>
    <catalog-name-pattern>`$$name$$`</catalog-name-pattern>
    <procedure-name-pattern>`$$name$$`</procedure-name-pattern>
    <column-alias-pattern>`$$name$$`</column-alias-pattern>

    <java-to-sql-type-conversions>
     <type-mapping>
      <from max-length="32672">String</from>
      <to>VARCHAR($$value$$)</to>
     </type-mapping>
     <type-mapping>
      <from>String</from>
      <to>VARCHAR(32672)</to>
     </type-mapping>
     <type-mapping>
      <from>Integer</from>
      <to>INTEGER</to>
     </type-mapping>
    </java-to-sql-type-conversions>
    </jdbc-type-settings>


----------

### Step 3: Configure Drill Data Sources with Tibco Spotfire Desktop 

To configure Drill data sources in TSS, you need to use the Tibco Spotfire Desktop client.

1. Open Tibco Spotfire Desktop. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-client.png)
2. Log into TSS. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-tss.png)
3. Select the deployment area in TSS to be used. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-deployment.png)
4. Click **Tools > Information Designer**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-infodesigner.png)
5. In the Information Designer, click **New > Data Source**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-infodesigner2.png)
6. In the Data Source window, enter the name for the data source. Select the Drill Data Source template created in Step 2 as the type. Update the connection URL with the correct hostname of the ZooKeeper node(s) and the Drill cluster name. Note: The Zookeeper node(s) hostname(s) and Drill cluster name can be found in the `$DRILL_HOME/conf/drill-override.conf` file on any of the Drill nodes in the cluster. Enter the username and password used to connect to Drill. When completed, click **Save**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-connectionURL.png)
7. In the Save As window, verify the name and the folder where you want to save the new data source in TSS. Click **Save** when done. TSS will now validate the information and save the new data source in TSS.
8. When the data source is saved, it will appear in the **Data Sources** tab, and you will be able to navigate the schema. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-datasources-tab.png)


----------

### Step 4: Query and Analyze the Data

After the Drill data source has been configured in the Information Designer, the information elements can be defined. 

1.	In this example all the columns of a Hive table have been defined, using the Drill data source, and added to an information link. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-infolink.png)
2.	The SQL syntax to retrieve the data can be validated by clicking the **SQL** button. Many other operations can be performed in Information Link,  including joins, filters, and so on. See the Tibco Spotfire documentation for details.
3.	You can now import the data of this table into TSS by clicking the **Open Data** button. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-hiveorders.png)
The data is now available in Tibco Spotfire Desktop to create various reports and tables as needed, and to be shared. For more information about creating charts, tables and reports, see the Tibco Spotfire documentation.


...
\ No newline at end of file
+---
+title: "Configuring Tibco Spotfire Server with Drill"
+parent: "Using Drill with BI Tools"
+---
+
+This document describes how to configure Tibco Spotfire Server (TSS) to integrate with Apache Drill and explore multiple data formats instantly on Hadoop. Users can combine these powerful platforms to rapidly gain analytical access to a wide variety of data types. 
+
+Complete the following steps to configure and use Apache Drill with TSS: 
+
+1. Install the Drill JDBC driver with TSS.
+2. Configure the Drill Data Source Template in TSS with the TSS configuration tool.
+3. Configure Drill data sources with Tibco Spotfire Desktop and Information Designer.
+4. Query and analyze various data formats with Tibco Spotfire and Drill.
+
+----------
+
+### Step 1: Install and Configure the Drill JDBC Driver 
+
+Drill provides standard JDBC connectivity, making it easy to integrate data exploration capabilities on complex, schema-less data sets. Tibco Spotfire Server (TSS) requires Drill 1.0 or later, which includes the JDBC driver. The JDBC driver is bundled with the Drill configuration files, and it is recommended that you use the JDBC driver that is shipped with the specific Drill version.
+
+For general instructions to install the Drill JDBC driver, see [Using JDBC]({{site.baseurl}}/docs/using-jdbc/).
+Complete the following steps to install and configure the JDBC driver for TSS:
+
+1. Locate the JDBC driver in the Drill installation directory:  
+   `<drill-home>/jars/jdbc-driver/drill-jdbc-all-<drill-version>.jar`  
+   For example, on a MapR cluster:  
+   `/opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/drill-jdbc-all-1.0.0-SNAPSHOT.jar`
+
+2. Locate the TSS library directory and copy the JDBC driver file to that directory: 
+   `<TSS-home-directory>/tomcat/lib`  
+   For example, on a Linux server:  
+   `/usr/local/bin/tibco/tss/6.0.3/tomcat/lib`  
+   For example, on a Windows server:  
+   `C:\Program Files\apache-tomcat\lib`
+
+3. Restart TSS to load the JDBC driver.
+4. Verify that the TSS system can resolve the hostnames of the ZooKeeper nodes for the Drill cluster. You can do this by validating that DNS is properly configured for the TSS system and all the ZooKeeper nodes. Alternatively, you can add the hostnames and IP addresses of the ZooKeeper nodes to the TSS system hosts file.  
+   For Linux systems, the hosts file is located here: 
+   `/etc/hosts`  
+   For Windows systems, the hosts file is located here: 
+   `%WINDIR%\system32\drivers\etc\hosts`
+
+----------
+
+### Step 2: Configure the Drill Data Source Template in TSS
+
+The Drill Data Source template can now be configured with the TSS Configuration Tool. The Windows-based TSS Configuration Tool is recommended. If TSS is installed on a Linux system, you also need to install TSS on a small Windows-based system so you can utilize the Configuration Tool. In this case, it is also recommended that you install the Drill JDBC driver on the TSS Windows system.
+
+1. Click **Start > All Programs > TIBCO Spotfire Server > Configure TIBCO Spotfire Server**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-start.png)
+2. Enter the Configuration Tool password that was specified when TSS was initially installed.
+3. Once the Configuration Tool has connected to TSS, click the **Configuration** tab, then **Data Source Templates**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-configtab.png)
+4. In the Data Source Templates window, click the **New** button at the bottom of the window. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-new.png)
+5. Provide a name for the data source template, then copy the following XML template into the **Data Source Template** box. When complete, click **OK**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-template.png)
+6. The new entry will now be available in the data source template. Check the box next to the new entry, then click **Save Configuration**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-saveconfig.png)
+7. Select **Database** as the destination and click **Next**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-saveconfig2.png) 
+8. Add a comment to the updated configuration and click **Finish**. 
+9. A response window is displayed to state that the configuration was successfully uploaded to TSS. Click **OK**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-importconfig.png)
+10. Restart TSS to enable it to use the Drill data source template.
+   
+#### XML Template
+
+Make sure that you enter the correct ZooKeeper node name instead of `<zk-node>`, as well as the correct Drill cluster name instead of `<drill-cluster-name>` in the example below. This is just a template that will appear whenever a data source is configured. The hostnames of ZooKeeper nodes and the Drill cluster name can be found in the `$DRILL_HOME/conf/drill-override.conf` file on any of the Drill nodes in the cluster.
+     
+      <jdbc-type-settings>
+      <type-name>drill</type-name>
+      <driver>org.apache.drill.jdbc.Driver</driver> 
+      <connection-url-pattern>jdbc:drill:zk=<zk-node>:5181/drill/<drill-cluster-name>-drillbits</connection-url-pattern> 
+      <ping-command>SELECT 1 FROM sys.version</ping-command>
+      <supports-catalogs>true</supports-catalogs>
+      <supports-schemas>true</supports-schemas>
+      <supports-procedures>false</supports-procedures>
+      <table-expression-pattern>[$$schema$$.]$$table$$</table-expression-pattern>
+   
+      <column-name-pattern>`$$name$$`</column-name-pattern>
+      <table-name-pattern>`$$name$$`</table-name-pattern>
+      <schema-name-pattern>`$$name$$`</schema-name-pattern>
+      <catalog-name-pattern>`$$name$$`</catalog-name-pattern>
+      <procedure-name-pattern>`$$name$$`</procedure-name-pattern>
+      <column-alias-pattern>`$$name$$`</column-alias-pattern>
+
+      <java-to-sql-type-conversions>
+       <type-mapping>
+        <from max-length="32672">String</from>
+        <to>VARCHAR($$value$$)</to>
+       </type-mapping>
+       <type-mapping>
+        <from>String</from>
+        <to>VARCHAR(32672)</to>
+       </type-mapping>
+       <type-mapping>
+        <from>Integer</from>
+        <to>INTEGER</to>
+       </type-mapping>
+      </java-to-sql-type-conversions>
+      </jdbc-type-settings>
+
+
+----------
+
+### Step 3: Configure Drill Data Sources with Tibco Spotfire Desktop 
+
+To configure Drill data sources in TSS, you need to use the Tibco Spotfire Desktop client.
+
+1. Open Tibco Spotfire Desktop. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-client.png)
+2. Log into TSS. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-tss.png)
+3. Select the deployment area in TSS to be used. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-deployment.png)
+4. Click **Tools > Information Designer**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-infodesigner.png)
+5. In the Information Designer, click **New > Data Source**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-infodesigner2.png)
+6. In the Data Source window, enter the name for the data source. Select the Drill Data Source template created in Step 2 as the type. Update the connection URL with the correct hostname of the ZooKeeper node(s) and the Drill cluster name. Note: The ZooKeeper node(s) hostname(s) and Drill cluster name can be found in the `$DRILL_HOME/conf/drill-override.conf` file on any of the Drill nodes in the cluster. Enter the username and password used to connect to Drill. When completed, click **Save**. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-connectionURL.png)
+7. In the Save As window, verify the name and the folder where you want to save the new data source in TSS. Click **Save** when done. TSS will now validate the information and save the new data source in TSS.
+8. When the data source is saved, it will appear in the **Data Sources** tab, and you will be able to navigate the schema. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-datasources-tab.png)
+
+----------
+
+### Step 4: Query and Analyze the Data
+
+After the Drill data source has been configured in the Information Designer, the information elements can be defined. 
+
+1.  In this example, all the columns of a Hive table have been defined using the Drill data source and added to an information link. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-infolink.png)
+2.  The SQL syntax to retrieve the data can be validated by clicking the **SQL** button. Many other operations can be performed in an information link, including joins, filters, and so on. See the Tibco Spotfire documentation for details.
+3.  You can now import the data of this table into TSS by clicking the **Open Data** button. ![drill query flow]({{ site.baseurl }}/docs/img/spotfire-server-hiveorders.png)
+The data is now available in Tibco Spotfire Desktop to create various reports and tables as needed, and to be shared. For more information about creating charts, tables and reports, see the Tibco Spotfire documentation.
\ No newline at end of file
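
As a quick sanity check before wiring up Spotfire, you can run the template's ping query directly in the Drill shell; this is the same query the XML template above declares as its ping-command:

    SELECT 1 FROM sys.version;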

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/performance-tuning/020-partition-pruning.md
----------------------------------------------------------------------
diff --git a/_docs/performance-tuning/020-partition-pruning.md b/_docs/performance-tuning/020-partition-pruning.md
index 307110f..49be254 100755
--- a/_docs/performance-tuning/020-partition-pruning.md
+++ b/_docs/performance-tuning/020-partition-pruning.md
@@ -5,17 +5,30 @@ parent: "Performance Tuning"
 
 Partition pruning is a performance optimization that limits the number of files and partitions that Drill reads when querying file systems and Hive tables. When you partition data, Drill only reads a subset of the files that reside in a file system or a subset of the partitions in a Hive table when a query matches certain filter criteria.
  
-The query planner in Drill evaluates the filters as part of a Filter operator. If no partition filters are present, the underlying Scan operator reads all files in all directories and then sends the data to operators downstream, such as Filter. When partition filters are present, the query planner determines if it can push the filters down to the Scan such that the Scan only reads the directories that match the partition filters, thus reducing disk I/O.
+The query planner in Drill performs partition pruning by evaluating the filters. If no partition filters are present, the underlying Scan operator reads all files in all directories and then sends the data downstream to operators such as Filter. When partition filters are present, the query planner pushes the filters down to the Scan if possible. The Scan then reads only the directories that match the partition filters, thus reducing disk I/O.
 
-## Determining a Partitioning Scheme  
+## How to Use Partition Pruning
 
-You can organize your data in such a way that maximizes partition pruning in Drill to optimize performance. Currently, you must partition data manually for a query to take advantage of partition pruning in Drill.
+You can partition data manually or automatically to take advantage of partition pruning in Drill. In Drill 1.0 and earlier, you must organize your data manually to take advantage of partition pruning. In Drill 1.1.0 and later, if the data source is Parquet, you can partition data automatically using CTAS; no data organization tasks are required. 
+
+## Automatic Partitioning
+Automatic partitioning in Drill 1.1.0 and later occurs when you write Parquet data using the [PARTITION BY]({{site.baseurl}}/docs/partition-by-clause/) clause in the CTAS statement.
+
+Automatic partitioning creates separate files, but not separate directories, for different partitions. Each file contains exactly one partition value, but there could be multiple files for the same partition value.
+
+Partition pruning uses the Parquet column statistics to determine which columns to use for pruning.
+
+## Manual Partitioning
  
-Partitioning data requires you to determine a partitioning scheme, or a logical way to store the data in a hierarchy of directories. You can then use CTAS to create Parquet files from the original data, specifying filter conditions, and then move the files into the correlating directories in the hierarchy. Once you have partitioned the data, you can create and query views on the data.
+1. Devise a logical way to store the data in a hierarchy of directories. 
+2. Use CTAS to create Parquet files from the original data, specifying filter conditions.
+3. Move the files into directories in the hierarchy. 
+
+After partitioning the data, create and query views on the data.
  
-### Partitioning Example  
+### Manual Partitioning Example  
 
-If you have several text files with log data which span multiple years, and you want to partition the data by year and quarter, you could create the following hierarchy of directories:  
+Suppose you have text files containing several years of log data. To partition the data by year and quarter, create the following hierarchy of directories:  
        
        …/logs/1994/Q1  
        …/logs/1994/Q2  
@@ -30,7 +43,7 @@ If you have several text files with log data which span multiple years, and you
        …/logs/1996/Q3  
        …/logs/1996/Q4  
 
-Once the directory structure is in place, run CTAS with a filter condition in the year and quarter for Q1 1994.
+Run the following CTAS statement, filtering on the Q1 1994 data.
  
           CREATE TABLE TT_1994_Q1 
               AS SELECT * FROM <raw table data in text format >
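
For the automatic path described above (Drill 1.1.0 and later), a hedged sketch of CTAS with PARTITION BY; the table, column, and path names are hypothetical, and the partition column must appear in the SELECT list:

    ALTER SESSION SET `store.format` = 'parquet';
    CREATE TABLE dfs.tmp.`logs_partitioned`
    PARTITION BY (log_year)
    AS SELECT log_year, log_message
    FROM dfs.`/logs/all_years`;  -- hypothetical source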

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/query-data/010-query-data-introduction.md
----------------------------------------------------------------------
diff --git a/_docs/query-data/010-query-data-introduction.md b/_docs/query-data/010-query-data-introduction.md
index 980c975..1d5ae5d 100644
--- a/_docs/query-data/010-query-data-introduction.md
+++ b/_docs/query-data/010-query-data-introduction.md
@@ -2,39 +2,41 @@
 title: "Query Data Introduction"
 parent: "Query Data"
 ---
-You can query local and distributed file systems, Hive, and HBase data sources
-registered with Drill. You issue the `USE
-<storage plugin>` statement to run your queries against a particular storage plugin. You use dot notation and back ticks to specify the storage plugin name and sometimes the workspace name. For example, to use the dfs storage plugin and default workspace, issue this command: ``USE dfs.`default``
+You can query local and distributed file systems, Hive, HBase data, complex data, INFORMATION_SCHEMA, and system tables, as described in the subtopics of this section. 
 
-Alternatively, you can omit the USE statement, and specify the storage plugin and workspace name using dot notation and back ticks. For example:
+A query specifies the data source location and can include data casting. 
+
+## Specifying the Data Source Location
+The optional [USE statement]({{site.baseurl}}/docs/use) runs subsequent queries against a particular [storage plugin]({{site.baseurl}}/docs/connect-a-data-source-introduction/). The USE statement typically saves you from typing some of the storage plugin information in the FROM clause. If you omit the USE statement, specify the storage plugin, such as dfs, optionally a workspace, such as default, and a path to the data source, using dot notation and back ticks. For example:
 
 ``dfs.`default`.`/Users/drill-user/apache-drill-1.0.0/log/sqlline_queries.json```;
 
-You may need to use casting functions in some queries. For example, you may
-have to cast a string `"100"` to an integer in order to apply a math function
+## Casting Data
+In some cases, Drill implicitly converts schema-less data to correctly typed data, and you do not need to [cast the data]({{site.baseurl}}/docs/supported-data-types/#casting-and-converting-data-types) to another type. The file format of the data and the nature of your query determine whether casting or converting is required. Differences in casting depend on the data source. 
+
+For example, you have to cast a string `"100"` in a JSON file to an integer in order to apply a math function
 or an aggregate function.
 
-You can use the EXPLAIN command to analyze errors and troubleshoot queries
+Use CONVERT_TO and CONVERT_FROM instead of the CAST function for converting binary data types, as described in [CONVERT_TO and CONVERT_FROM Usage Notes](/docs/data-type-conversion/#convert_to-and-convert_from-usage-notes).
+
+## Troubleshooting Queries
+
+In addition to testing queries interactively in the Drill shell and examining error messages, use the [EXPLAIN command]({{site.baseurl}}/docs/explain/) to analyze errors and troubleshoot queries
 that do not run. For example, if you run into a casting error, the query plan
 text may help you isolate the problem.
 
     0: jdbc:drill:zk=local> !set maxwidth 10000
     0: jdbc:drill:zk=local> explain plan for select ... ;
 
-The set command increases the default text display (number of characters). By
+[Drill shell commands]({{site.baseurl}}/docs/configuring-the-drill-shell/) include `!set <set variable> <value>`, which you can use to increase the default text display (number of characters). By
 default, most of the plan output is hidden.
 
+## Query Syntax Tips
+
 Remember the following tips when querying data with Drill:
 
-  * Include a semicolon at the end of SQL statements, except when you issue a command with an exclamation point `(!).   
+  * Include a semicolon at the end of SQL statements, except when you issue a [Drill shell command]({{site.baseurl}}/docs/configuring-the-drill-shell/).   
    Example: `!set maxwidth 10000`
-  * Use backticks around file and directory names that contain special characters and also around reserved words when you query a file system.   
-    The following special characters require backticks:
-
-    * . (period)
-    * / (forward slash)
-    * _ (underscore)
-    Example: ``SELECT * FROM dfs.default.`sample_data/my_sample.json`; ``
-  * `CAST` data to `VARCHAR` if an expression in a query returns `VARBINARY` as the result type in order to view the `VARBINARY` types as readable data. If you do not use the `CAST` function, Drill returns the results as byte data.    
-     Example: `CAST (VARBINARY_expr as VARCHAR(50))`
+  * Use backticks around [keywords]({{site.baseurl}}/docs/reserved-keywords), special characters, and [identifiers]({{site.baseurl}}/docs/lexical-structure/#identifier) that SQL cannot parse, such as the keyword `default` and a path that contains a forward slash character:
+    Example: ``SELECT * FROM dfs.`default`.`/Users/drilluser/apache-drill-1.1.0-SNAPSHOT/sample-data/nation.parquet`;``
   * When selecting all (SELECT *) schema-less data, the order of returned columns might differ from the stored order and might vary from query to query.
\ No newline at end of file
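
A small sketch tying the USE and casting guidance above together; the file path and column name are hypothetical:

    USE dfs.`default`;
    SELECT CAST(t.amount AS INTEGER) * 2 AS doubled_amount
    FROM `/Users/drill-user/sample.json` t;  -- amount holds strings such as "100"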

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/sql-reference/090-sql-extensions.md
----------------------------------------------------------------------
diff --git a/_docs/sql-reference/090-sql-extensions.md b/_docs/sql-reference/090-sql-extensions.md
index ed97611..4896abd 100644
--- a/_docs/sql-reference/090-sql-extensions.md
+++ b/_docs/sql-reference/090-sql-extensions.md
@@ -6,7 +6,7 @@ Drill extends SQL to work with Hadoop-scale data and to explore smaller-scale da
 
 Drill provides language support for pointing to [storage plugin]({{site.baseurl}}/docs/connect-a-data-source-introduction) interfaces that Drill uses to interact with data sources. Use the name of a storage plugin to specify a file system *database* as a prefix in queries when you refer to objects across databases. Query files, including compressed .gz files, and [directories]({{ site.baseurl }}/docs/querying-directories), as you would query an SQL table. You can query multiple files in a directory.
 
-Drill extends the SELECT statement for reading complex, multi-structured data. The extended CREATE TABLE AS SELECT provides the capability to write data of complex/multi-structured data types. Drill extends the [lexical rules](http://drill.apache.org/docs/lexical-structure) for working with files and directories, such as using back ticks for including file names, directory names, and reserved words in queries. Drill syntax supports using the file system as a persistent store for query profiles and diagnostic information.
+Drill extends the SELECT statement for reading complex, multi-structured data. The extended CREATE TABLE AS provides the capability to write data of complex/multi-structured data types. Drill extends the [lexical rules](http://drill.apache.org/docs/lexical-structure) for working with files and directories, such as using backticks for enclosing file names, directory names, and reserved words in queries. Drill syntax supports using the file system as a persistent store for query profiles and diagnostic information.
 
 ## Extensions for Hive- and HBase-related Data Sources
 

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/sql-reference/data-types/010-supported-data-types.md
----------------------------------------------------------------------
diff --git a/_docs/sql-reference/data-types/010-supported-data-types.md b/_docs/sql-reference/data-types/010-supported-data-types.md
index a30c4b9..89e001c 100644
--- a/_docs/sql-reference/data-types/010-supported-data-types.md
+++ b/_docs/sql-reference/data-types/010-supported-data-types.md
@@ -71,7 +71,7 @@ changes in the data processing, Drill regenerates the code as necessary.
 ## Casting and Converting Data Types
 
 In Drill, you cast or convert data to the required type for moving data from one data source to another or to make the data readable.
-You do not assign a data type to every column name in a CREATE TABLE statement to define the table as you do in database software. Instead, you use the CREATE TABLE AS SELECT (CTAS) statement with one or more of the following functions to define the table:
+You do not assign a data type to every column name in a CREATE TABLE statement to define the table as you do in database software. Instead, you use the CREATE TABLE AS (CTAS) statement with one or more of the following functions to define the table:
 
 * [CAST]({{ site.baseurl }}/docs/data-type-conversion#cast)    
 * [CONVERT TO/FROM]({{ site.baseurl }}/docs/data-type-conversion#convert_to-and-convert_from)   

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/sql-reference/sql-commands/030-create-table-as.md
----------------------------------------------------------------------
diff --git a/_docs/sql-reference/sql-commands/030-create-table-as.md b/_docs/sql-reference/sql-commands/030-create-table-as.md
index f5ba9d3..1669cf6 100644
--- a/_docs/sql-reference/sql-commands/030-create-table-as.md
+++ b/_docs/sql-reference/sql-commands/030-create-table-as.md
@@ -6,7 +6,7 @@ You can create tables in Drill by using the CTAS command:
 
     CREATE TABLE new_table_name AS <query>;
 
-where query is any valid Drill query. Each table you create must have a unique
+where query is a SELECT statement. Each table you create must have a unique
 name. You can include an optional column list for the new table. For example:
 
     create table logtable(transid, prodid) as select transaction_id, product_id from ...
@@ -17,7 +17,7 @@ You can store table data in one of three formats:
   * parquet
   * json
 
-The parquet and json formats can be used to store complex data.
+The parquet and json formats can be used to store complex data. Drill automatically partitions data stored in parquet when you use the [PARTITION BY]({{site.baseurl}}/docs/partition-by-clause) clause.
 
 To set the output format for a Drill table, set the `store.format` option with
 the ALTER SYSTEM or ALTER SESSION command. For example:

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/sql-reference/sql-commands/035-partition-by-clause.md
----------------------------------------------------------------------
diff --git a/_docs/sql-reference/sql-commands/035-partition-by-clause.md b/_docs/sql-reference/sql-commands/035-partition-by-clause.md
new file mode 100644
index 0000000..b34e5e7
--- /dev/null
+++ b/_docs/sql-reference/sql-commands/035-partition-by-clause.md
@@ -0,0 +1,37 @@
+---
+title: "PARTITION BY Clause"
+parent: "SQL Commands"
+---
+You can take advantage of automatic partitioning in Drill 1.1 by using the PARTITION BY clause in the CTAS command:
+
+	CREATE TABLE table_name [ (column_name, . . .) ] 
+    [ PARTITION BY (column_name, . . .) ] 
+    AS SELECT_statement;
+
+A CTAS statement that uses the PARTITION BY clause must store the data in Parquet format and must meet one of the following requirements:
+
+* The columns listed in the PARTITION BY clause are included in the column list that follows the table_name.
+* If the base table in the SELECT statement is schema-less, the SELECT statement uses a * column; when the partition column is resolved to the * column in a schema-less query, that * column cannot be the result of a join operation. 
+
+
+To create and verify the contents of a partitioned table:
+
+  1. Set the workspace to a writable workspace.
+  2. Set the `store.format` option to Parquet.
+  3. Run a CTAS statement with the PARTITION BY clause.
+  4. Go to the directory where the table is stored and check the contents of the file.
+  5. Run a query against the new table.
+
+Examples:
+
+	CREATE TABLE mytable1 PARTITION BY (r_regionkey) AS 
+	  SELECT r_regionkey, r_name FROM cp.`tpch/region.parquet`;
+	CREATE TABLE mytable2 PARTITION BY (r_regionkey) AS 
+	  SELECT * FROM cp.`tpch/region.parquet`;
+	CREATE TABLE mytable3 PARTITION BY (r_regionkey) AS
+	  SELECT r.r_regionkey, r.r_name, n.n_nationkey, n.n_name 
+	  FROM cp.`tpch/nation.parquet` n, cp.`tpch/region.parquet` r
+	  WHERE n.n_regionkey = r.r_regionkey;
+
+
+
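Taken together, the procedure and examples above amount to a short session; a minimal sketch, assuming the writable `dfs.tmp` workspace and the TPC-H sample file bundled with Drill:

    USE dfs.tmp;
    ALTER SESSION SET `store.format` = 'parquet';
    CREATE TABLE mytable1 PARTITION BY (r_regionkey) AS
      SELECT r_regionkey, r_name FROM cp.`tpch/region.parquet`;
    SELECT r_name FROM mytable1 WHERE r_regionkey = 1;

The final SELECT filters on the partition column, which is what lets Drill prune the partition directories during the scan.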

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/sql-reference/sql-functions/020-data-type-conversion.md
----------------------------------------------------------------------
diff --git a/_docs/sql-reference/sql-functions/020-data-type-conversion.md b/_docs/sql-reference/sql-functions/020-data-type-conversion.md
index 77466a3..77770fe 100644
--- a/_docs/sql-reference/sql-functions/020-data-type-conversion.md
+++ b/_docs/sql-reference/sql-functions/020-data-type-conversion.md
@@ -261,7 +261,7 @@ This example assumes you are working in the Drill Sandbox. The `maprdb` storage
 
 ### Convert the Binary HBase Students Table to JSON Data
 
-First, you set the storage format to JSON. Next, you use the CREATE TABLE AS SELECT (CTAS) statement to convert from a selected file of a different format, HBase in this example, to the storage format. You then convert the JSON file to Parquet using a similar procedure. Set the storage format to Parquet, and use a CTAS statement to convert to Parquet from JSON. In each case, you [select UTF8]({{ site.baseurl }}/docs/data-type-conversion/#convert_to-and-convert_from-data-types) as the file format because the data you are converting from and then to consists of strings.
+First, you set the storage format to JSON. Next, you use the CREATE TABLE AS (CTAS) statement to convert from a selected file of a different format, HBase in this example, to the storage format. You then convert the JSON file to Parquet using a similar procedure. Set the storage format to Parquet, and use a CTAS statement to convert to Parquet from JSON. In each case, you [select UTF8]({{ site.baseurl }}/docs/data-type-conversion/#convert_to-and-convert_from-data-types) as the file format because the data you are converting from and then to consists of strings.
 
 1. Start Drill on the Drill Sandbox and set the default storage format from Parquet to JSON.
 

http://git-wip-us.apache.org/repos/asf/drill/blob/00efc0f1/_docs/tutorials/030-analyzing-the-yelp-academic-dataset.md
----------------------------------------------------------------------
diff --git a/_docs/tutorials/030-analyzing-the-yelp-academic-dataset.md b/_docs/tutorials/030-analyzing-the-yelp-academic-dataset.md
index c16964b..308c8c3 100644
--- a/_docs/tutorials/030-analyzing-the-yelp-academic-dataset.md
+++ b/_docs/tutorials/030-analyzing-the-yelp-academic-dataset.md
@@ -315,7 +315,7 @@ Note that Drill views are lightweight, and can just be created in the local
 file system. Drill in standalone mode comes with a dfs.tmp workspace, which we
 can use to create views (or you can can define your own workspaces on a local
 or distributed file system). If you want to persist the data physically
-instead of in a logical view, you can use CREATE TABLE AS SELECT syntax.
+instead of in a logical view, you can use CREATE TABLE AS syntax.
 
     0: jdbc:drill:zk=local> create or replace view dfs.tmp.businessreviews as 
     Select b.name,b.stars,b.state,b.city,r.votes.funny,r.votes.useful,r.votes.cool, r.`date` 


[5/5] drill git commit: Merge remote-tracking branch 'remotes/apache/gh-pages' into gh-pages

Posted by br...@apache.org.
Merge remote-tracking branch 'remotes/apache/gh-pages' into gh-pages


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/3d1c0055
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/3d1c0055
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/3d1c0055

Branch: refs/heads/gh-pages
Commit: 3d1c005541858d39f9f2a4dddb3f25b0a9b7aca5
Parents: 00efc0f d476ce5
Author: Bridget Bevens <bb...@maprtech.com>
Authored: Fri Jun 19 17:01:36 2015 -0700
Committer: Bridget Bevens <bb...@maprtech.com>
Committed: Fri Jun 19 17:01:36 2015 -0700

----------------------------------------------------------------------
 team.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)
----------------------------------------------------------------------



[3/5] drill git commit: JReport doc

Posted by br...@apache.org.
JReport doc


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/81820904
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/81820904
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/81820904

Branch: refs/heads/gh-pages
Commit: 81820904d529f35938817a18e347af99689bdcdd
Parents: e8aba59
Author: Bob Rumsby <br...@mapr.com>
Authored: Wed Jun 10 15:57:42 2015 -0700
Committer: Kristine Hahn <kh...@maprtech.com>
Committed: Fri Jun 19 14:43:21 2015 -0700

----------------------------------------------------------------------
 _data/docs.json                                 | 108 ++++++++++++++++---
 _docs/img/jreport-addtable.png                  | Bin 0 -> 95442 bytes
 _docs/img/jreport-catalogbrowser.png            | Bin 0 -> 44988 bytes
 _docs/img/jreport-crosstab.png                  | Bin 0 -> 38885 bytes
 _docs/img/jreport-crosstab2.png                 | Bin 0 -> 59255 bytes
 _docs/img/jreport-crosstab3.png                 | Bin 0 -> 53791 bytes
 _docs/img/jreport-hostsfile.png                 | Bin 0 -> 119798 bytes
 _docs/img/jreport-queryeditor.png               | Bin 0 -> 40606 bytes
 _docs/img/jreport-quotequalifier.png            | Bin 0 -> 71219 bytes
 _docs/img/jreport_setenv.png                    | Bin 0 -> 63047 bytes
 .../odbc-jdbc-interfaces/015-using-jdbc-driver  | 100 +++++++++++++++++
 .../080-configuring-jreport.md                  |  34 ++++++
 12 files changed, 230 insertions(+), 12 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_data/docs.json
----------------------------------------------------------------------
diff --git a/_data/docs.json b/_data/docs.json
index 491fddc..204c4fa 100644
--- a/_data/docs.json
+++ b/_data/docs.json
@@ -1124,6 +1124,27 @@
             "title": "Configuring Drill Memory", 
             "url": "/docs/configuring-drill-memory/"
         }, 
+        "Configuring JReport with Drill": {
+            "breadcrumbs": [
+                {
+                    "title": "Using Drill with BI Tools", 
+                    "url": "/docs/using-drill-with-bi-tools/"
+                }, 
+                {
+                    "title": "ODBC/JDBC Interfaces", 
+                    "url": "/docs/odbc-jdbc-interfaces/"
+                }
+            ], 
+            "children": [], 
+            "next_title": "Query Data", 
+            "next_url": "/docs/query-data/", 
+            "parent": "Using Drill with BI Tools", 
+            "previous_title": "Using Apache Drill with Tableau 9 Server", 
+            "previous_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+            "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md", 
+            "title": "Configuring JReport with Drill", 
+            "url": "/docs/configuring-jreport-with-drill/"
+        }, 
         "Configuring Multitenant Resources": {
             "breadcrumbs": [
                 {
@@ -5356,14 +5377,35 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "Query Data", 
-                            "next_url": "/docs/query-data/", 
+                            "next_title": "Configuring JReport with Drill", 
+                            "next_url": "/docs/configuring-jreport-with-drill/", 
                             "parent": "Using Drill with BI Tools", 
                             "previous_title": "Using Apache Drill with Tableau 9 Desktop", 
                             "previous_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
                             "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/070-using-apache-drill-with-tableau-9-server.md", 
                             "title": "Using Apache Drill with Tableau 9 Server", 
                             "url": "/docs/using-apache-drill-with-tableau-9-server/"
+                        }, 
+                        {
+                            "breadcrumbs": [
+                                {
+                                    "title": "Using Drill with BI Tools", 
+                                    "url": "/docs/using-drill-with-bi-tools/"
+                                }, 
+                                {
+                                    "title": "ODBC/JDBC Interfaces", 
+                                    "url": "/docs/odbc-jdbc-interfaces/"
+                                }
+                            ], 
+                            "children": [], 
+                            "next_title": "Query Data", 
+                            "next_url": "/docs/query-data/", 
+                            "parent": "Using Drill with BI Tools", 
+                            "previous_title": "Using Apache Drill with Tableau 9 Server", 
+                            "previous_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                            "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md", 
+                            "title": "Configuring JReport with Drill", 
+                            "url": "/docs/configuring-jreport-with-drill/"
                         }
                     ], 
                     "next_title": "Using Drill with BI Tools Introduction", 
@@ -6457,8 +6499,8 @@
             "next_title": "Query Data Introduction", 
             "next_url": "/docs/query-data-introduction/", 
             "parent": "", 
-            "previous_title": "Using Apache Drill with Tableau 9 Server", 
-            "previous_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+            "previous_title": "Configuring JReport with Drill", 
+            "previous_url": "/docs/configuring-jreport-with-drill/", 
             "relative_path": "_docs/070-query-data.md", 
             "title": "Query Data", 
             "url": "/docs/query-data/"
@@ -10287,8 +10329,8 @@
                 }
             ], 
             "children": [], 
-            "next_title": "Query Data", 
-            "next_url": "/docs/query-data/", 
+            "next_title": "Configuring JReport with Drill", 
+            "next_url": "/docs/configuring-jreport-with-drill/", 
             "parent": "Using Drill with BI Tools", 
             "previous_title": "Using Apache Drill with Tableau 9 Desktop", 
             "previous_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
@@ -10519,14 +10561,35 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Query Data", 
-                    "next_url": "/docs/query-data/", 
+                    "next_title": "Configuring JReport with Drill", 
+                    "next_url": "/docs/configuring-jreport-with-drill/", 
                     "parent": "Using Drill with BI Tools", 
                     "previous_title": "Using Apache Drill with Tableau 9 Desktop", 
                     "previous_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
                     "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/070-using-apache-drill-with-tableau-9-server.md", 
                     "title": "Using Apache Drill with Tableau 9 Server", 
                     "url": "/docs/using-apache-drill-with-tableau-9-server/"
+                }, 
+                {
+                    "breadcrumbs": [
+                        {
+                            "title": "Using Drill with BI Tools", 
+                            "url": "/docs/using-drill-with-bi-tools/"
+                        }, 
+                        {
+                            "title": "ODBC/JDBC Interfaces", 
+                            "url": "/docs/odbc-jdbc-interfaces/"
+                        }
+                    ], 
+                    "children": [], 
+                    "next_title": "Query Data", 
+                    "next_url": "/docs/query-data/", 
+                    "parent": "Using Drill with BI Tools", 
+                    "previous_title": "Using Apache Drill with Tableau 9 Server", 
+                    "previous_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                    "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md", 
+                    "title": "Configuring JReport with Drill", 
+                    "url": "/docs/configuring-jreport-with-drill/"
                 }
             ], 
             "next_title": "Using Drill with BI Tools Introduction", 
@@ -12392,14 +12455,35 @@
                                 }
                             ], 
                             "children": [], 
-                            "next_title": "Query Data", 
-                            "next_url": "/docs/query-data/", 
+                            "next_title": "Configuring JReport with Drill", 
+                            "next_url": "/docs/configuring-jreport-with-drill/", 
                             "parent": "Using Drill with BI Tools", 
                             "previous_title": "Using Apache Drill with Tableau 9 Desktop", 
                             "previous_url": "/docs/using-apache-drill-with-tableau-9-desktop/", 
                             "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/070-using-apache-drill-with-tableau-9-server.md", 
                             "title": "Using Apache Drill with Tableau 9 Server", 
                             "url": "/docs/using-apache-drill-with-tableau-9-server/"
+                        }, 
+                        {
+                            "breadcrumbs": [
+                                {
+                                    "title": "Using Drill with BI Tools", 
+                                    "url": "/docs/using-drill-with-bi-tools/"
+                                }, 
+                                {
+                                    "title": "ODBC/JDBC Interfaces", 
+                                    "url": "/docs/odbc-jdbc-interfaces/"
+                                }
+                            ], 
+                            "children": [], 
+                            "next_title": "Query Data", 
+                            "next_url": "/docs/query-data/", 
+                            "parent": "Using Drill with BI Tools", 
+                            "previous_title": "Using Apache Drill with Tableau 9 Server", 
+                            "previous_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+                            "relative_path": "_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md", 
+                            "title": "Configuring JReport with Drill", 
+                            "url": "/docs/configuring-jreport-with-drill/"
                         }
                     ], 
                     "next_title": "Using Drill with BI Tools Introduction", 
@@ -12797,8 +12881,8 @@
             "next_title": "Query Data Introduction", 
             "next_url": "/docs/query-data-introduction/", 
             "parent": "", 
-            "previous_title": "Using Apache Drill with Tableau 9 Server", 
-            "previous_url": "/docs/using-apache-drill-with-tableau-9-server/", 
+            "previous_title": "Configuring JReport with Drill", 
+            "previous_url": "/docs/configuring-jreport-with-drill/", 
             "relative_path": "_docs/070-query-data.md", 
             "title": "Query Data", 
             "url": "/docs/query-data/"

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-addtable.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-addtable.png b/_docs/img/jreport-addtable.png
new file mode 100644
index 0000000..db5a464
Binary files /dev/null and b/_docs/img/jreport-addtable.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-catalogbrowser.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-catalogbrowser.png b/_docs/img/jreport-catalogbrowser.png
new file mode 100644
index 0000000..25a1d3a
Binary files /dev/null and b/_docs/img/jreport-catalogbrowser.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-crosstab.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-crosstab.png b/_docs/img/jreport-crosstab.png
new file mode 100644
index 0000000..21cb550
Binary files /dev/null and b/_docs/img/jreport-crosstab.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-crosstab2.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-crosstab2.png b/_docs/img/jreport-crosstab2.png
new file mode 100644
index 0000000..427198c
Binary files /dev/null and b/_docs/img/jreport-crosstab2.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-crosstab3.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-crosstab3.png b/_docs/img/jreport-crosstab3.png
new file mode 100644
index 0000000..46d71ad
Binary files /dev/null and b/_docs/img/jreport-crosstab3.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-hostsfile.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-hostsfile.png b/_docs/img/jreport-hostsfile.png
new file mode 100644
index 0000000..a6486bd
Binary files /dev/null and b/_docs/img/jreport-hostsfile.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-queryeditor.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-queryeditor.png b/_docs/img/jreport-queryeditor.png
new file mode 100644
index 0000000..c0af6f9
Binary files /dev/null and b/_docs/img/jreport-queryeditor.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport-quotequalifier.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport-quotequalifier.png b/_docs/img/jreport-quotequalifier.png
new file mode 100644
index 0000000..3760011
Binary files /dev/null and b/_docs/img/jreport-quotequalifier.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/img/jreport_setenv.png
----------------------------------------------------------------------
diff --git a/_docs/img/jreport_setenv.png b/_docs/img/jreport_setenv.png
new file mode 100644
index 0000000..9fbe143
Binary files /dev/null and b/_docs/img/jreport_setenv.png differ

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver
----------------------------------------------------------------------
diff --git a/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver b/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver
new file mode 100755
index 0000000..b2339fe
--- /dev/null
+++ b/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver
@@ -0,0 +1,100 @@
+---
+title: "Using the JDBC Driver"
+parent: "ODBC/JDBC Interfaces"
+---
+This section explains how to install and use the JDBC driver for Apache Drill. For specific examples of client tool connections to Drill via JDBC, see [Using JDBC with SQuirreL]({{ site.baseurl }}/docs/.../) and [Configuring Spotfire Server]({{ site.baseurl }}/docs/.../).
+
+
+### Prerequisites
+
+  * JRE 7 or JDK 7
+  * Drill installed either in embedded mode or in distributed mode on one or more nodes in a cluster. Refer to the [Install Drill]({{ site.baseurl }}/docs/install-drill/) documentation for more information.
+  * The client must be able to resolve the actual hostname of the Drill node(s) with the IP(s). Verify that a DNS entry was created on the client machine for the Drill node(s).
+     
+If a DNS entry does not exist, create the entry for the Drill node(s).
+
+    * For Windows, create the entry in the %WINDIR%\system32\drivers\etc\hosts file.
+    * For Linux and Mac OSX, create the entry in /etc/hosts.  
+<drill-machine-IP> <drill-machine-hostname>
+    Example: `127.0.1.1 maprdemo`
+
+
+----------
+
+### Getting the Drill JDBC Driver
+
+The Drill JDBC Driver `JAR` file must exist in a directory on a client machine so you can configure the driver for the application or third-party tool that you intend to use. You can obtain the driver in two different ways:
+
+1. Copy the `drill-jdbc-all` JAR file from the following Drill installation directory on a node where Drill is installed to a directory on your client
+machine:
+
+    <drill_installation_directory>/jars/jdbc-driver/drill-jdbc-all-<version>.jar
+    
+    For example: drill1.0/jdbc-driver/drill-jdbc-all-1.0.0-mapr-r1.jar
+
+2. Download the following tar file to a location on your client machine: [apache-
+drill-1.0.0.tar.gz](http://apache.osuosl.org/drill/drill-1.0.0/apache-drill-1.0.0-src.tar.gz) and extract the file. You may need to use a decompression utility, such as [7-zip](http://www.7-zip.org/). The driver is extracted to the following directory:
+
+    <drill-home>\apache-drill-<version>\jars\jdbc-driver\drill-jdbc-all-<version>.jar
+
+Mac vs windows paths here....
+
+On a MapR cluster, the JDBC driver is installed here: `/opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/`
+
+----------
+
+### JDBC Driver URLs
+
+To configure a JDBC application, users have to:
+
+1. Put the Drill JDBC jar file on the class path.
+2. Use a valid Drill JDBC URL.
+3. Configure tools or application code with the name of the Drill driver class.
+
+The driver URLs that you use to create JDBC connection strings must be formed as stated in the following sections. 
+
+
+#### Driver Class Name
+
+The class name for the JDBC driver is `org.apache.drill.jdbc.Driver`
+
+#### URL Syntax
+
+The form of the driver's JDBC URLs is as follows. The URL consists of some required and some optional parameters. 
+
+A Drill JDBC URL must start with: `"{{jdbc:drill:}}"`
+
+#### URL Examples
+
+`jdbc:drill:zk=maprdemo:5181`
+
+where `zk=maprdemo:5181` defines the ZooKeeper quorum.
+
+`jdbc:drill:zk=10.10.100.56:5181/drill/drillbits1;schema=hive`
+
+where the ZooKeeper node IP address is provided as well as the Drill directory in ZK and the cluster ID?
+
+`jdbc:drill:zk=10.10.100.30:5181,10.10.100.31:5181,10.10.100.32:5181/drill/drillbits1;schema=hive`
+
+<li>Including a default schema is optional.</li>
+<li>The ZooKeeper port is 2181. In a MapR cluster, the ZooKeeper port is 5181.</li>
+<li>The Drill directory stored in ZooKeeper is <code>/drill</code>.</li>
+<li>The Drill default cluster ID is<code> drillbits1</code>.</li>
+
+---------
+
+### JDBC Driver Configuration Options
+
+To control the behavior of the Drill JDBC driver, you can append the following configuration options to the JDBC URL:
+
+<config options>
+
+
+----------
+
+
+### Related Documentation
+
+When you have connected to Drill through the JDBC Driver, you can issue queries from the JDBC application or client. Start by running
+a test query on some sample data included in the Drill installation.
+

http://git-wip-us.apache.org/repos/asf/drill/blob/81820904/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md
----------------------------------------------------------------------
diff --git a/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md b/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md
new file mode 100644
index 0000000..1782ad1
--- /dev/null
+++ b/_docs/odbc-jdbc-interfaces/using-drill-with-bi-tools/080-configuring-jreport.md
@@ -0,0 +1,34 @@
+---
+title: "Configuring JReport with Drill"
+parent: "Using Drill with BI Tools"
+---
+
+JReport is an embeddable BI solution that empowers users to analyze data and create reports and dashboards. JReport accesses data from Hadoop systems, such as the MapR Distribution through Apache Drill, as well as other big data and transactional data sources. By visualizing data through Drill, users can perform their own reporting and data discovery for agile, on-the-fly decision-making.

You can use JReport 13.1 and the Apache Drill JDBC Driver to easily extract data from the MapR Distribution and visualize it, creating reports and dashboards that you can embed into your own applications.
+
Complete the following simple steps to use Apache Drill with JReport:
+

1. Install the Drill JDBC Driver with JReport.
2. Create a new JReport Catalog to manage the Drill connection.
3. Use JReport Designer to query the data and create a report.

----------
+
+### Step 1: Install the Drill JDBC Driver with JReport
+
+Drill provides standard JDBC connectivity to easily integrate with JReport. JReport 13.1 requires Drill 1.0 or later.
+
For general instructions on installing the Drill JDBC driver, see [Using JDBC]({{ site.baseurl }}/docs/using-jdbc/).

1. Locate the JDBC driver in the Drill installation directory on any node where Drill is installed on the cluster: 
+
        <drill-home>/jars/jdbc-driver/drill-jdbc-all-<drill-version>.jar 
   For example:
+
        /opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/drill-jdbc-all-1.0.0.jar
   
2. Copy the Drill JDBC driver into the JReport `lib` folder:
+
        %REPORTHOME%\lib\
+   For example, on Windows, copy the Drill JDBC driver jar file into:
+   
+        C:\JReport\Designer\lib\drill-jdbc-all-1.0.0.jar
    
3.	Add the location of the JAR file to the JReport CLASSPATH variable. On Windows, edit the `C:\JReport\Designer\bin\setenv.bat` file:
+
    ![drill query flow]({{ site.baseurl }}/docs/img/jreport_setenv.png)

4. Verify that the JReport system can resolve the hostnames of the ZooKeeper nodes of the Drill cluster. You can do this by configuring DNS for all of the systems. Alternatively, you can edit the hosts file on the JReport system to include the hostnames and IP addresses of all the ZooKeeper nodes used with the Drill cluster.  For Linux systems, the hosts file is located at `/etc/hosts`. For Windows systems, the hosts file is located at `%WINDIR%\system32\drivers\etc\hosts`. Here is an example of a Windows hosts file: ![drill query flow]({{ site.baseurl }}/docs/img/jreport-hostsfile.png)
+
+----------
+
+### Step 2: Create a New JReport Catalog to Manage the Drill Connection
+
+1.	Click Create **New -> Catalog…**
2.	Provide a catalog file name and click **…** to choose the file-saving location.
3.	Click **View -> Catalog Browser**.
4.	Right-click **Data Source 1** and select **Add JDBC Connection**.
5.	Fill in the **Driver**, **URL**, **User**, and **Password** fields (example values follow this list). ![drill query flow]({{ site.baseurl }}/docs/img/jreport-catalogbrowser.png)
6.	Click **Options** and select the **Qualifier** tab. 
7.	In the **Quote Qualifier** section, choose **User Defined** and change the quote character from “ to ` (backtick). ![drill query flow]({{ site.baseurl }}/docs/img/jreport-quotequalifier.png)
8.	Click **OK**. JReport will verify the connection and save all information.
9.	Add tables and views to the JReport catalog by right-clicking the connection node and choosing **Add Table**. Now you can browse the schemas and add specific tables that you want to make available for building queries. ![drill query flow]({{ site.baseurl }}/docs/img/jreport-addtable.png)
10.	Click **Done** when you have added all the tables you need. 
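For reference, the connection fields in step 5 follow the Drill JDBC conventions covered in the documentation referenced in Step 1; example values (the ZooKeeper host is a placeholder):

        Driver: org.apache.drill.jdbc.Driver
        URL:    jdbc:drill:zk=<zk-host>:5181/drill/drillbits1

User and Password are whatever credentials, if any, your Drill cluster requires.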
+
+
+### Step 3: Use JReport Designer
+
+1.	In the Catalog Browser, right-click **Queries** and select **Add Query…**
2.	Define a JReport query by using the Query Editor. You can also import your own SQL statements. ![drill query flow]({{ site.baseurl }}/docs/img/jreport-queryeditor.png)
3.	Click **OK** to close the Query Editor, and click the **Save Catalog** button to save your progress to the catalog file. 
+
    **Note**: If the report returns errors, you may need to edit the query and add the schema in front of the table name: `select column from schema.table_name`. You can do this by clicking the **SQL** button on the Query Editor.

4.  Use JReport Designer to query the data and create a report. ![drill query flow]({{ site.baseurl }}/docs/img/jreport-crosstab.png)
+
    ![drill query flow]({{ site.baseurl }}/docs/img/jreport-crosstab2.png)
+
    ![drill query flow]({{ site.baseurl }}/docs/img/jreport-crosstab3.png)
\ No newline at end of file


[2/5] drill git commit: Update Using JDBC topic

Posted by br...@apache.org.
Update Using JDBC topic


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/cfd4573a
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/cfd4573a
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/cfd4573a

Branch: refs/heads/gh-pages
Commit: cfd4573a2f0dfe36f7355213f5f3e6b6e2dc97a6
Parents: 05f79eb
Author: Bob Rumsby <br...@mapr.com>
Authored: Thu Jun 11 17:49:19 2015 -0700
Committer: Kristine Hahn <kh...@maprtech.com>
Committed: Fri Jun 19 14:43:21 2015 -0700

----------------------------------------------------------------------
 _data/docs.json                                 | 75 ++++++++++++++---
 .../015-using-jdbc-driver.md                    | 89 +++++++++++---------
 2 files changed, 114 insertions(+), 50 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/cfd4573a/_data/docs.json
----------------------------------------------------------------------
diff --git a/_data/docs.json b/_data/docs.json
index 204c4fa..6989a98 100644
--- a/_data/docs.json
+++ b/_data/docs.json
@@ -4185,8 +4185,8 @@
                 }
             ], 
             "children": [], 
-            "next_title": "Using JDBC with SQuirreL on Windows", 
-            "next_url": "/docs/using-jdbc-with-squirrel-on-windows/", 
+            "next_title": "Using the JDBC Driver", 
+            "next_url": "/docs/using-the-jdbc-driver/", 
             "parent": "ODBC/JDBC Interfaces", 
             "previous_title": "ODBC/JDBC Interfaces", 
             "previous_url": "/docs/odbc-jdbc-interfaces/", 
@@ -4920,8 +4920,8 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Using JDBC with SQuirreL on Windows", 
-                    "next_url": "/docs/using-jdbc-with-squirrel-on-windows/", 
+                    "next_title": "Using the JDBC Driver", 
+                    "next_url": "/docs/using-the-jdbc-driver/", 
                     "parent": "ODBC/JDBC Interfaces", 
                     "previous_title": "ODBC/JDBC Interfaces", 
                     "previous_url": "/docs/odbc-jdbc-interfaces/", 
@@ -4937,11 +4937,28 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Installing the ODBC Driver", 
-                    "next_url": "/docs/installing-the-odbc-driver/", 
+                    "next_title": "Using JDBC with SQuirreL on Windows", 
+                    "next_url": "/docs/using-jdbc-with-squirrel-on-windows/", 
                     "parent": "ODBC/JDBC Interfaces", 
                     "previous_title": "Interfaces Introduction", 
                     "previous_url": "/docs/interfaces-introduction/", 
+                    "relative_path": "_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md", 
+                    "title": "Using the JDBC Driver", 
+                    "url": "/docs/using-the-jdbc-driver/"
+                }, 
+                {
+                    "breadcrumbs": [
+                        {
+                            "title": "ODBC/JDBC Interfaces", 
+                            "url": "/docs/odbc-jdbc-interfaces/"
+                        }
+                    ], 
+                    "children": [], 
+                    "next_title": "Installing the ODBC Driver", 
+                    "next_url": "/docs/installing-the-odbc-driver/", 
+                    "parent": "ODBC/JDBC Interfaces", 
+                    "previous_title": "Using the JDBC Driver", 
+                    "previous_url": "/docs/using-the-jdbc-driver/", 
                     "relative_path": "_docs/odbc-jdbc-interfaces/020-using-jdbc-with-squirrel-on-windows.md", 
                     "title": "Using JDBC with SQuirreL on Windows", 
                     "url": "/docs/using-jdbc-with-squirrel-on-windows/"
@@ -10633,8 +10650,8 @@
             "next_title": "Installing the ODBC Driver", 
             "next_url": "/docs/installing-the-odbc-driver/", 
             "parent": "ODBC/JDBC Interfaces", 
-            "previous_title": "Interfaces Introduction", 
-            "previous_url": "/docs/interfaces-introduction/", 
+            "previous_title": "Using the JDBC Driver", 
+            "previous_url": "/docs/using-the-jdbc-driver/", 
             "relative_path": "_docs/odbc-jdbc-interfaces/020-using-jdbc-with-squirrel-on-windows.md", 
             "title": "Using JDBC with SQuirreL on Windows", 
             "url": "/docs/using-jdbc-with-squirrel-on-windows/"
@@ -10702,6 +10719,23 @@
             "title": "Using Tibco Spotfire with Drill", 
             "url": "/docs/using-tibco-spotfire-with-drill/"
         }, 
+        "Using the JDBC Driver": {
+            "breadcrumbs": [
+                {
+                    "title": "ODBC/JDBC Interfaces", 
+                    "url": "/docs/odbc-jdbc-interfaces/"
+                }
+            ], 
+            "children": [], 
+            "next_title": "Using JDBC with SQuirreL on Windows", 
+            "next_url": "/docs/using-jdbc-with-squirrel-on-windows/", 
+            "parent": "ODBC/JDBC Interfaces", 
+            "previous_title": "Interfaces Introduction", 
+            "previous_url": "/docs/interfaces-introduction/", 
+            "relative_path": "_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md", 
+            "title": "Using the JDBC Driver", 
+            "url": "/docs/using-the-jdbc-driver/"
+        }, 
         "Value Vectors": {
             "breadcrumbs": [
                 {
@@ -11998,8 +12032,8 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Using JDBC with SQuirreL on Windows", 
-                    "next_url": "/docs/using-jdbc-with-squirrel-on-windows/", 
+                    "next_title": "Using the JDBC Driver", 
+                    "next_url": "/docs/using-the-jdbc-driver/", 
                     "parent": "ODBC/JDBC Interfaces", 
                     "previous_title": "ODBC/JDBC Interfaces", 
                     "previous_url": "/docs/odbc-jdbc-interfaces/", 
@@ -12015,11 +12049,28 @@
                         }
                     ], 
                     "children": [], 
-                    "next_title": "Installing the ODBC Driver", 
-                    "next_url": "/docs/installing-the-odbc-driver/", 
+                    "next_title": "Using JDBC with SQuirreL on Windows", 
+                    "next_url": "/docs/using-jdbc-with-squirrel-on-windows/", 
                     "parent": "ODBC/JDBC Interfaces", 
                     "previous_title": "Interfaces Introduction", 
                     "previous_url": "/docs/interfaces-introduction/", 
+                    "relative_path": "_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md", 
+                    "title": "Using the JDBC Driver", 
+                    "url": "/docs/using-the-jdbc-driver/"
+                }, 
+                {
+                    "breadcrumbs": [
+                        {
+                            "title": "ODBC/JDBC Interfaces", 
+                            "url": "/docs/odbc-jdbc-interfaces/"
+                        }
+                    ], 
+                    "children": [], 
+                    "next_title": "Installing the ODBC Driver", 
+                    "next_url": "/docs/installing-the-odbc-driver/", 
+                    "parent": "ODBC/JDBC Interfaces", 
+                    "previous_title": "Using the JDBC Driver", 
+                    "previous_url": "/docs/using-the-jdbc-driver/", 
                     "relative_path": "_docs/odbc-jdbc-interfaces/020-using-jdbc-with-squirrel-on-windows.md", 
                     "title": "Using JDBC with SQuirreL on Windows", 
                     "url": "/docs/using-jdbc-with-squirrel-on-windows/"

http://git-wip-us.apache.org/repos/asf/drill/blob/cfd4573a/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
----------------------------------------------------------------------
diff --git a/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md b/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
index b2339fe..9f471eb 100755
--- a/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
+++ b/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
@@ -9,14 +9,12 @@ This section explains how to install and use the JDBC driver for Apache Drill. F
 
   * JRE 7 or JDK 7
   * Drill installed either in embedded mode or in distributed mode on one or more nodes in a cluster. Refer to the [Install Drill]({{ site.baseurl }}/docs/install-drill/) documentation for more information.
-  * The client must be able to resolve the actual hostname of the Drill node(s) with the IP(s). Verify that a DNS entry was created on the client machine for the Drill node(s).
-     
-If a DNS entry does not exist, create the entry for the Drill node(s).
+  * The client must be able to resolve the actual hostname of the Drill node(s) with the IP(s). Verify that a DNS entry was created on the client machine for the Drill node(s).  If a DNS entry does not exist, create the entry for the Drill node(s).
 
     * For Windows, create the entry in the %WINDIR%\system32\drivers\etc\hosts file.
     * For Linux and Mac OSX, create the entry in /etc/hosts.  
 <drill-machine-IP> <drill-machine-hostname>
-    Example: `127.0.1.1 maprdemo`
+    For example: `127.0.1.1 maprdemo`
 
 
 ----------
@@ -25,25 +23,20 @@ If a DNS entry does not exist, create the entry for the Drill node(s).
 
 The Drill JDBC Driver `JAR` file must exist in a directory on a client machine so you can configure the driver for the application or third-party tool that you intend to use. You can obtain the driver in two different ways:
 
-1. Copy the `drill-jdbc-all` JAR file from the following Drill installation directory on a node where Drill is installed to a directory on your client
-machine:
+1. Copy the `drill-jdbc-all` JAR file from the following Drill installation directory on a node where Drill is installed to a directory on your client machine:
 
-    <drill_installation_directory>/jars/jdbc-driver/drill-jdbc-all-<version>.jar
+        <drill_installation_directory>/jars/jdbc-driver/drill-jdbc-all-<version>.jar
     
-    For example: drill1.0/jdbc-driver/drill-jdbc-all-1.0.0-mapr-r1.jar
+    For example, on a MapR cluster: `/opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/drill-jdbc-all-1.0.0-mapr-r1.jar`
 
 2. Download the following tar file to a location on your client machine: [apache-
 drill-1.0.0.tar.gz](http://apache.osuosl.org/drill/drill-1.0.0/apache-drill-1.0.0-src.tar.gz) and extract the file. You may need to use a decompression utility, such as [7-zip](http://www.7-zip.org/). The driver is extracted to the following directory:
 
-    <drill-home>\apache-drill-<version>\jars\jdbc-driver\drill-jdbc-all-<version>.jar
-
-Mac vs windows paths here....
-
-On a MapR cluster, the JDBC driver is installed here: `/opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/`
+        <drill-home>\apache-drill-<version>\jars\jdbc-driver\drill-jdbc-all-<version>.jar
 
 ----------
 
-### JDBC Driver URLs
+### Configuring a Driver Application or Client
 
 To configure a JDBC application, users have to:
 
@@ -51,50 +44,70 @@ To configure a JDBC application, users have to:
 2. Use a valid Drill JDBC URL.
 3. Configure tools or application code with the name of the Drill driver class.
 
-The driver URLs that you use to create JDBC connection strings must be formed as stated in the following sections. 
+Most client tools provide a UI where you can enter all of the required connection information, including the Driver location, connection URL, and driver class name.
 
+### JDBC Driver URLs
 
-#### Driver Class Name
+The driver URLs that you use to create JDBC connection strings must be formed as follows:
 
-The class name for the JDBC driver is `org.apache.drill.jdbc.Driver`
+`jdbc:drill:zk=<zookeeper_quorum>:<port>/<drill_directory_in_zookeeper>/<cluster_ID>;schema=<schema_to_use_as_default>`
 
-#### URL Syntax
+Any Drill JDBC URL must start with: `jdbc:drill`.
 
-The form of the driver's JDBC URLs is as follows. The URL consists of some required and some optional parameters. 
+**ZooKeeper Quorum**
 
-A Drill JDBC URL must start with: `"{{jdbc:drill:}}"`
+To connect to a cluster, specify the ZooKeeper quorum as a list of hostnames or IP addresses. 
 
-#### URL Examples
+**ZooKeeper Port Number**
 
-`jdbc:drill:zk=maprdemo:5181`
+The default ZooKeeper port is 2181. On a MapR cluster, the ZooKeeper port is 5181.
 
-where `zk=maprdemo:5181` defines the ZooKeeper quorum.
+**Drill Directory in ZooKeeper**
 
-`jdbc:drill:zk=10.10.100.56:5181/drill/drillbits1;schema=hive`
+The name of the Drill directory stored in ZooKeeper is `/drill`.
 
-where the ZooKeeper node IP address is provided as well as the Drill directory in ZK and the cluster ID?
+**Cluster ID**
 
-`jdbc:drill:zk=10.10.100.30:5181,10.10.100.31:5181,10.10.100.32:5181/drill/drillbits1;schema=hive`
+The Drill default cluster ID is `drillbits1`.
 
-<li>Including a default schema is optional.</li>
-<li>The ZooKeeper port is 2181. In a MapR cluster, the ZooKeeper port is 5181.</li>
-<li>The Drill directory stored in ZooKeeper is <code>/drill</code>.</li>
-<li>The Drill default cluster ID is<code> drillbits1</code>.</li>
+On a MapR cluster, check the following file for the cluster ID:
 
----------
+`/opt/mapr/drill/drill-1.0.0/conf/drill-override.conf`
 
-### JDBC Driver Configuration Options
+For example:
 
-To control the behavior of the Drill JDBC driver, you can append the following configuration options to the JDBC URL:
+    ...
+    drill.exec: {
+      cluster-id: "docs41cluster-drillbits",
+      zk.connect: "centos23.lab:5181,centos28.lab:5181,centos29.lab:5181"
+    }
+    ...
 
-<config options>
+**Schema**
 
+Optionally, include the default schema for the JDBC connection. For example:
+
+`schema=hive`
 
-----------
 
+### URL Examples
 
-### Related Documentation
+**Single-Node Installation**
 
-When you have connected to Drill through the JDBC Driver, you can issue queries from the JDBC application or client. Start by running
-a test query on some sample data included in the Drill installation.
+`jdbc:drill:zk=maprdemo:5181`
+
+`jdbc:drill:zk=centos23.lab:5181/drill/docs41cluster-drillbits`
+
+`jdbc:drill:zk=10.10.100.56:5181/drill/drillbits1;schema=hive`
+
+**Cluster Installation**
+
+`jdbc:drill:zk=10.10.100.30:5181,10.10.100.31:5181,10.10.100.32:5181/drill/drillbits1;schema=hive`
+
+---------
+
+### Driver Class Name
+
+The class name for the JDBC driver is `org.apache.drill.jdbc.Driver`.
 
+-----------
\ No newline at end of file
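With the URL and driver class name in hand, the connection can be smoke-tested from a client machine; a sketch (the ZooKeeper host is a placeholder) that starts the sqlline shell bundled with Drill and runs a query against the sample data shipped on Drill's classpath:

    bin/sqlline -u "jdbc:drill:zk=<zk-host>:5181/drill/drillbits1"
    0: jdbc:drill:zk=<zk-host>:5181> SELECT * FROM cp.`employee.json` LIMIT 5;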


[4/5] drill git commit: Generic Using JDBC topic

Posted by br...@apache.org.
Generic Using JDBC topic


Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/05f79eb5
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/05f79eb5
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/05f79eb5

Branch: refs/heads/gh-pages
Commit: 05f79eb58dc026da9b44a113310976e65efcde23
Parents: 8182090
Author: Bob Rumsby <br...@mapr.com>
Authored: Wed Jun 10 16:45:01 2015 -0700
Committer: Kristine Hahn <kh...@maprtech.com>
Committed: Fri Jun 19 14:43:21 2015 -0700

----------------------------------------------------------------------
 .../015-using-jdbc-driver.md                    | 100 +++++++++++++++++++
 1 file changed, 100 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/drill/blob/05f79eb5/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
----------------------------------------------------------------------
diff --git a/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md b/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
new file mode 100755
index 0000000..b2339fe
--- /dev/null
+++ b/_docs/odbc-jdbc-interfaces/015-using-jdbc-driver.md
@@ -0,0 +1,100 @@
+---
+title: "Using the JDBC Driver"
+parent: "ODBC/JDBC Interfaces"
+---
+This section explains how to install and use the JDBC driver for Apache Drill. For specific examples of client tool connections to Drill via JDBC, see [Using JDBC with SQuirreL]({{ site.baseurl }}/docs/.../) and [Configuring Spotfire Server]({{ site.baseurl }}/docs/.../).
+
+
+### Prerequisites
+
+  * JRE 7 or JDK 7
+  * Drill installed either in embedded mode or in distributed mode on one or more nodes in a cluster. Refer to the [Install Drill]({{ site.baseurl }}/docs/install-drill/) documentation for more information.
+  * The client must be able to resolve the actual hostname of the Drill node(s) with the IP(s). Verify that a DNS entry was created on the client machine for the Drill node(s).
+     
+If a DNS entry does not exist, create the entry for the Drill node(s).
+
+    * For Windows, create the entry in the %WINDIR%\system32\drivers\etc\hosts file.
+    * For Linux and Mac OSX, create the entry in /etc/hosts.  
+<drill-machine-IP> <drill-machine-hostname>
+    Example: `127.0.1.1 maprdemo`
+
+
+----------
+
+### Getting the Drill JDBC Driver
+
+The Drill JDBC Driver `JAR` file must exist in a directory on a client machine so you can configure the driver for the application or third-party tool that you intend to use. You can obtain the driver in two different ways:
+
+1. Copy the `drill-jdbc-all` JAR file from the following Drill installation directory on a node where Drill is installed to a directory on your client
+machine:
+
+    <drill_installation_directory>/jars/jdbc-driver/drill-jdbc-all-<version>.jar
+    
+    For example: drill1.0/jdbc-driver/drill-jdbc-all-1.0.0-mapr-r1.jar
+
+2. Download the following tar file to a location on your client machine: [apache-
+drill-1.0.0.tar.gz](http://apache.osuosl.org/drill/drill-1.0.0/apache-drill-1.0.0-src.tar.gz) and extract the file. You may need to use a decompression utility, such as [7-zip](http://www.7-zip.org/). The driver is extracted to the following directory:
+
+    <drill-home>\apache-drill-<version>\jars\jdbc-driver\drill-jdbc-all-<version>.jar
+
+Mac vs windows paths here....
+
+On a MapR cluster, the JDBC driver is installed here: `/opt/mapr/drill/drill-1.0.0/jars/jdbc-driver/`
+
+----------
+
+### JDBC Driver URLs
+
+To configure a JDBC application, users have to:
+
+1. Put the Drill JDBC jar file on the class path.
+2. Use a valid Drill JDBC URL.
+3. Configure tools or application code with the name of the Drill driver class.
+
+The driver URLs that you use to create JDBC connection strings must be formed as stated in the following sections. 
+
+
+#### Driver Class Name
+
+The class name for the JDBC driver is `org.apache.drill.jdbc.Driver`
+
+#### URL Syntax
+
+The form of the driver's JDBC URLs is as follows. The URL consists of some required and some optional parameters. 
+
+A Drill JDBC URL must start with: `"{{jdbc:drill:}}"`
+
+#### URL Examples
+
+`jdbc:drill:zk=maprdemo:5181`
+
+where `zk=maprdemo:5181` defines the ZooKeeper quorum.
+
+`jdbc:drill:zk=10.10.100.56:5181/drill/drillbits1;schema=hive`
+
+where the ZooKeeper node IP address is provided as well as the Drill directory in ZK and the cluster ID?
+
+`jdbc:drill:zk=10.10.100.30:5181,10.10.100.31:5181,10.10.100.32:5181/drill/drillbits1;schema=hive`
+
+<li>Including a default schema is optional.</li>
+<li>The ZooKeeper port is 2181. In a MapR cluster, the ZooKeeper port is 5181.</li>
+<li>The Drill directory stored in ZooKeeper is <code>/drill</code>.</li>
+<li>The Drill default cluster ID is<code> drillbits1</code>.</li>
+
+---------
+
+### JDBC Driver Configuration Options
+
+To control the behavior of the Drill JDBC driver, you can append the following configuration options to the JDBC URL:
+
+<config options>
+
+
+----------
+
+
+### Related Documentation
+
+When you have connected to Drill through the JDBC Driver, you can issue queries from the JDBC application or client. Start by running
+a test query on some sample data included in the Drill installation.
+