Posted to commits@knox.apache.org by su...@apache.org on 2017/02/10 19:43:39 UTC

[6/6] knox git commit: KNOX-865 Added a release module and refactored samples

KNOX-865 Added a release module and refactored samples


Project: http://git-wip-us.apache.org/repos/asf/knox/repo
Commit: http://git-wip-us.apache.org/repos/asf/knox/commit/761dd5d4
Tree: http://git-wip-us.apache.org/repos/asf/knox/tree/761dd5d4
Diff: http://git-wip-us.apache.org/repos/asf/knox/diff/761dd5d4

Branch: refs/heads/master
Commit: 761dd5d44ac873df692ec3cf8ecdbf8342e3d0e8
Parents: f1dcae4
Author: Sumit Gupta <su...@apache.org>
Authored: Fri Feb 10 14:42:39 2017 -0500
Committer: Sumit Gupta <su...@apache.org>
Committed: Fri Feb 10 14:43:23 2017 -0500

----------------------------------------------------------------------
 .../home/samples/ExampleHBase.groovy            |  181 ---
 .../home/samples/ExampleOozieWorkflow.groovy    |  131 --
 .../home/samples/ExampleWebHCatJob.groovy       |   78 -
 .../samples/ExampleWebHCatJobTeragen.groovy     |   80 -
 .../home/samples/ExampleWebHCatPig.groovy       |   84 --
 .../home/samples/ExampleWebHCatQueue.groovy     |   38 -
 .../home/samples/ExampleWebHCatSqoop.groovy     |   80 -
 .../home/samples/ExampleWebHdfsLs.groovy        |   40 -
 .../home/samples/ExampleWebHdfsPutGet.groovy    |   45 -
 gateway-release/home/samples/README             |   13 -
 .../home/samples/SampleComplexCommand.groovy    |   69 -
 .../home/samples/SampleScript.groovy            |   35 -
 .../home/samples/SampleService.groovy           |   32 -
 .../home/samples/SampleSimpleCommand.groovy     |   51 -
 .../HiveJDBCSample.groovy                       |   64 -
 .../groovy/jdbc/sandbox-with-knox-inside/README |    2 -
 .../groovy/jdbc/sandbox/HiveJDBCSample.groovy   |   67 -
 .../samples/hive/groovy/jdbc/sandbox/README     |    1 -
 .../HiveJDBCSample.java                         |  106 --
 .../java/jdbc/sandbox-with-knox-inside/README   |    2 -
 .../hive/java/jdbc/sandbox/HiveJDBCSample.java  |  109 --
 .../home/samples/hive/java/jdbc/sandbox/README  |    1 -
 gateway-release/home/samples/hive/sample.log    | 1369 ------------------
 .../home/samples/sandbox-with-knox-inside.xml   |   96 --
 gateway-release/pom.xml                         |   12 +
 gateway-release/src/assembly.xml                |    7 +
 gateway-shell-release/home/bin/knox-env.sh      |   68 +
 gateway-shell-release/home/bin/knoxshell.cmd    |   35 +
 gateway-shell-release/home/bin/knoxshell.sh     |   72 +
 .../home/conf/knoxshell-log4j.properties        |   34 +
 gateway-shell-release/pom.xml                   |  180 +++
 gateway-shell-release/src/assembly.xml          |   82 ++
 gateway-shell-samples/pom.xml                   |   48 +
 .../main/resources/samples/ExampleHBase.groovy  |  181 +++
 .../samples/ExampleOozieWorkflow.groovy         |  131 ++
 .../resources/samples/ExampleWebHCatJob.groovy  |   78 +
 .../samples/ExampleWebHCatJobTeragen.groovy     |   80 +
 .../resources/samples/ExampleWebHCatPig.groovy  |   84 ++
 .../samples/ExampleWebHCatQueue.groovy          |   38 +
 .../samples/ExampleWebHCatSqoop.groovy          |   80 +
 .../resources/samples/ExampleWebHdfsLs.groovy   |   40 +
 .../samples/ExampleWebHdfsPutGet.groovy         |   45 +
 .../src/main/resources/samples/README           |   13 +
 .../samples/SampleComplexCommand.groovy         |   69 +
 .../main/resources/samples/SampleScript.groovy  |   35 +
 .../main/resources/samples/SampleService.groovy |   32 +
 .../samples/SampleSimpleCommand.groovy          |   51 +
 .../HiveJDBCSample.groovy                       |   64 +
 .../groovy/jdbc/sandbox-with-knox-inside/README |    2 +
 .../groovy/jdbc/sandbox/HiveJDBCSample.groovy   |   67 +
 .../samples/hive/groovy/jdbc/sandbox/README     |    1 +
 .../HiveJDBCSample.java                         |  106 ++
 .../java/jdbc/sandbox-with-knox-inside/README   |    2 +
 .../hive/java/jdbc/sandbox/HiveJDBCSample.java  |  109 ++
 .../samples/hive/java/jdbc/sandbox/README       |    1 +
 .../src/main/resources/samples/hive/sample.log  | 1369 ++++++++++++++++++
 .../samples/sandbox-with-knox-inside.xml        |   96 ++
 pom.xml                                         |   12 +
 58 files changed, 3324 insertions(+), 2774 deletions(-)
----------------------------------------------------------------------
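
Every sample moved by this commit follows the same skeleton: prompt for credentials, log in to the gateway to open a session, issue requests through the shell DSL, and shut the session down. A minimal sketch of that pattern, assuming the default sandbox topology URL used throughout the samples:

  import groovy.json.JsonSlurper
  import org.apache.hadoop.gateway.shell.Credentials
  import org.apache.hadoop.gateway.shell.Hadoop
  import org.apache.hadoop.gateway.shell.hdfs.Hdfs

  gateway = "https://localhost:8443/gateway/sandbox"

  // Prompt for a username in the clear and a password with hidden input.
  credentials = new Credentials()
  credentials.add("ClearInput", "Enter username: ", "user")
             .add("HiddenInput", "Enter password: ", "pass")
  credentials.collect()

  // Open an authenticated session, issue one WebHDFS request, and shut down.
  session = Hadoop.login(gateway, credentials.get("user").string(), credentials.get("pass").string())
  json = (new JsonSlurper()).parseText(Hdfs.ls(session).dir("/").now().string)
  println json.FileStatuses.FileStatus.pathSuffix
  session.shutdown()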


http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleHBase.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleHBase.groovy b/gateway-release/home/samples/ExampleHBase.groovy
deleted file mode 100644
index 7d20aca..0000000
--- a/gateway-release/home/samples/ExampleHBase.groovy
+++ /dev/null
@@ -1,181 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.gateway.shell.hbase
-
-import org.apache.hadoop.gateway.shell.Hadoop
-
-import static java.util.concurrent.TimeUnit.SECONDS
-
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-tableName = "test_table"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-session = Hadoop.login(gateway, username, pass)
-
-println "System version : " + HBase.session(session).systemVersion().now().string
-
-println "Cluster version : " + HBase.session(session).clusterVersion().now().string
-
-println "Status : " + HBase.session(session).status().now().string
-
-println "Creating table '" + tableName + "'..."
-
-HBase.session(session).table(tableName).create()  \
-    .attribute("tb_attr1", "value1")  \
-    .attribute("tb_attr2", "value2")  \
-    .family("family1")  \
-        .attribute("fm_attr1", "value3")  \
-        .attribute("fm_attr2", "value4")  \
-    .endFamilyDef()  \
-    .family("family2")  \
-    .family("family3")  \
-    .endFamilyDef()  \
-    .attribute("tb_attr3", "value5")  \
-    .now()
-
-println "Done"
-
-println "Table List : " + HBase.session(session).table().list().now().string
-
-println "Schema for table '" + tableName + "' : " + HBase.session(session)  \
-    .table(tableName)  \
-    .schema()  \
-    .now().string
-
-println "Updating schema of table '" + tableName + "'..."
-
-HBase.session(session).table(tableName).update()  \
-    .family("family1")  \
-        .attribute("fm_attr1", "new_value3")  \
-    .endFamilyDef()  \
-    .family("family4")  \
-        .attribute("fm_attr3", "value6")  \
-    .endFamilyDef()  \
-    .now()
-
-println "Done"
-
-println "Schema for table '" + tableName + "' : " + HBase.session(session)  \
-    .table(tableName)  \
-    .schema()  \
-    .now().string
-
-println "Inserting data into table..."
-
-HBase.session(session).table(tableName).row("row_id_1").store()  \
-    .column("family1", "col1", "col_value1")  \
-    .column("family1", "col2", "col_value2", 1234567890l)  \
-    .column("family2", null, "fam_value1")  \
-    .now()
-
-HBase.session(session).table(tableName).row("row_id_2").store()  \
-    .column("family1", "row2_col1", "row2_col_value1")  \
-    .now()
-
-println "Done"
-
-println "Querying row by id..."
-
-println HBase.session(session).table(tableName).row("row_id_1")  \
-    .query()  \
-    .now().string
-
-println "Querying all rows..."
-
-println HBase.session(session).table(tableName).row().query().now().string
-
-println "Querying row by id with extended settings..."
-
-println HBase.session(session).table(tableName).row().query()  \
-    .column("family1", "row2_col1")  \
-    .column("family2")  \
-    .times(0, Long.MAX_VALUE)  \
-    .numVersions(1)  \
-    .now().string
-
-println "Deleting cell..."
-
-HBase.session(session).table(tableName).row("row_id_1")  \
-    .delete()  \
-    .column("family1", "col1")  \
-    .now()
-
-println "Rows after delete:"
-
-println HBase.session(session).table(tableName).row().query().now().string
-
-println "Extended cell delete"
-
-HBase.session(session).table(tableName).row("row_id_1")  \
-    .delete()  \
-    .column("family2")  \
-    .time(Long.MAX_VALUE)  \
-    .now()
-
-println "Rows after delete:"
-
-println HBase.session(session).table(tableName).row().query().now().string
-
-println "Table regions : " + HBase.session(session).table(tableName)  \
-    .regions()  \
-    .now().string
-
-println "Creating scanner..."
-
-scannerId = HBase.session(session).table(tableName).scanner().create()  \
-    .column("family1", "col2")  \
-    .column("family2")  \
-    .startRow("row_id_1")  \
-    .endRow("row_id_2")  \
-    .batch(1)  \
-    .startTime(0)  \
-    .endTime(Long.MAX_VALUE)  \
-    .filter("")  \
-    .maxVersions(100)  \
-    .now().scannerId
-
-println "Scanner id=" + scannerId
-
-println "Scanner get next..."
-
-println HBase.session(session).table(tableName).scanner(scannerId)  \
-    .getNext()  \
-    .now().string
-
-println "Dropping scanner with id=" + scannerId
-
-HBase.session(session).table(tableName).scanner(scannerId).delete().now()
-
-println "Done"
-
-println "Dropping table '" + tableName + "'..."
-
-HBase.session(session).table(tableName).delete().now()
-
-println "Done"
-
-session.shutdown(10, SECONDS)

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleOozieWorkflow.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleOozieWorkflow.groovy b/gateway-release/home/samples/ExampleOozieWorkflow.groovy
deleted file mode 100644
index b6e7e72..0000000
--- a/gateway-release/home/samples/ExampleOozieWorkflow.groovy
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import com.jayway.jsonpath.JsonPath
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.workflow.Workflow
-
-import static java.util.concurrent.TimeUnit.SECONDS
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-inputFile = "LICENSE"
-jarFile = "samples/hadoop-examples.jar"
-
-gateway = "https://localhost:8443/gateway/sandbox"
-tableName = "test_table"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-jobDir = "/user/" + username + "/test"
-
-definition = """\
-<workflow-app xmlns="uri:oozie:workflow:0.2" name="wordcount-workflow">
-    <start to="root-node"/>
-    <action name="root-node">
-        <java>
-            <job-tracker>\${jobTracker}</job-tracker>
-            <name-node>\${nameNode}</name-node>
-            <main-class>org.apache.hadoop.examples.WordCount</main-class>
-            <arg>\${inputDir}</arg>
-            <arg>\${outputDir}</arg>
-        </java>
-        <ok to="end"/>
-        <error to="fail"/>
-    </action>
-    <kill name="fail">
-        <message>Java failed, error message[\${wf:errorMessage(wf:lastErrorNode())}]</message>
-    </kill>
-    <end name="end"/>
-</workflow-app>
-"""
-
-configuration = """\
-<configuration>
-    <property>
-        <name>user.name</name>
-        <value>default</value>
-    </property>
-    <property>
-        <name>nameNode</name>
-        <value>default</value>
-    </property>
-    <property>
-        <name>jobTracker</name>
-        <value>default</value>
-    </property>
-    <property>
-        <name>inputDir</name>
-        <value>$jobDir/input</value>
-    </property>
-    <property>
-        <name>outputDir</name>
-        <value>$jobDir/output</value>
-    </property>
-    <property>
-        <name>oozie.wf.application.path</name>
-        <value>$jobDir</value>
-    </property>
-</configuration>
-"""
-
-session = Hadoop.login( gateway, username, pass )
-
-println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
-println "Mkdir " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
-
-putData = Hdfs.put(session).file( inputFile ).to( jobDir + "/input/FILE" ).later() {
-  println "Put " + jobDir + "/input/FILE: " + it.statusCode }
-
-putJar = Hdfs.put(session).file( jarFile ).to( jobDir + "/lib/hadoop-examples.jar" ).later() {
-  println "Put " + jobDir + "/lib/hadoop-examples.jar: " + it.statusCode }
-
-putWorkflow = Hdfs.put(session).text( definition ).to( jobDir + "/workflow.xml" ).later() {
-  println "Put " + jobDir + "/workflow.xml: " + it.statusCode }
-
-session.waitFor( putWorkflow, putData, putJar )
-
-jobId = Workflow.submit(session).text( configuration ).now().jobId
-println "Submitted job: " + jobId
-
-println "Polling up to 60s for job completion..."
-status = "RUNNING";
-count = 0;
-while( status == "RUNNING" && count++ < 60 ) {
-  sleep( 1000 )
-  json = Workflow.status(session).jobId( jobId ).now().string
-  status = JsonPath.read( json, "\$.status" )
-  print "."; System.out.flush();
-}
-println ""
-println "Job status: " + status
-
-if( status == "SUCCEEDED" ) {
-  text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
-  json = (new JsonSlurper()).parseText( text )
-  println json.FileStatuses.FileStatus.pathSuffix
-}
-
-println "Session closed: " + session.shutdown( 10, SECONDS )
\ No newline at end of file
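
The Oozie sample above also illustrates the DSL's two execution modes: now() runs a request synchronously, while later() submits it asynchronously and invokes the supplied closure on completion; session.waitFor(...) then blocks until the outstanding requests finish. A trimmed sketch of the asynchronous upload pattern, assuming an open session and the inputFile, jarFile, and jobDir values from the sample:

  // Upload the data file and the job jar in parallel; each closure fires
  // when its request completes.
  putData = Hdfs.put(session).file(inputFile).to(jobDir + "/input/FILE").later() {
    println "Put " + jobDir + "/input/FILE: " + it.statusCode
  }
  putJar = Hdfs.put(session).file(jarFile).to(jobDir + "/lib/hadoop-examples.jar").later() {
    println "Put " + jobDir + "/lib/hadoop-examples.jar: " + it.statusCode
  }
  // Block until both outstanding requests have completed.
  session.waitFor(putData, putJar)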

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHCatJob.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHCatJob.groovy b/gateway-release/home/samples/ExampleWebHCatJob.groovy
deleted file mode 100644
index b7477b9..0000000
--- a/gateway-release/home/samples/ExampleWebHCatJob.groovy
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import com.jayway.jsonpath.JsonPath
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.job.Job
-
-import static java.util.concurrent.TimeUnit.SECONDS
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-dataFile = "LICENSE"
-jarFile = "samples/hadoop-examples.jar"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-jobDir = "/user/" + username + "/test"
-
-session = Hadoop.login( gateway, username, pass )
-
-println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
-println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
-
-putData = Hdfs.put( session ).file( dataFile ).to( jobDir + "/input/" + dataFile ).later() {
-  println "Put " + jobDir + "/input/" + dataFile + ": " + it.statusCode }
-
-putJar = Hdfs.put( session ).file( jarFile ).to( jobDir + "/lib/hadoop-examples.jar" ).later() {
-  println "Put " + jobDir + "/lib/hadoop-examples.jar: " + it.statusCode }
-
-session.waitFor( putData, putJar )
-
-jobId = Job.submitJava(session) \
-  .jar( jobDir + "/lib/hadoop-examples.jar" ) \
-  .app( "org.apache.hadoop.examples.WordCount" ) \
-  .input( jobDir + "/input" ) \
-  .output( jobDir + "/output" ) \
-  .now().jobId
-println "Submitted job: " + jobId
-
-println "Polling up to 60s for job completion..."
-done = false
-count = 0
-while( !done && count++ < 60 ) {
-  sleep( 1000 )
-  json = Job.queryStatus(session).jobId(jobId).now().string
-  done = JsonPath.read( json, "\$.status.jobComplete" )
-  print "."; System.out.flush();
-}
-println ""
-println "Job status: " + done
-
-text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
-json = (new JsonSlurper()).parseText( text )
-println json.FileStatuses.FileStatus.pathSuffix
-
-println "Session closed: " + session.shutdown( 10, SECONDS )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHCatJobTeragen.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHCatJobTeragen.groovy b/gateway-release/home/samples/ExampleWebHCatJobTeragen.groovy
deleted file mode 100644
index 3b111a9..0000000
--- a/gateway-release/home/samples/ExampleWebHCatJobTeragen.groovy
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import com.jayway.jsonpath.JsonPath
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.job.Job
-
-import static java.util.concurrent.TimeUnit.SECONDS
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-
-// You will need to copy hadoop-mapreduce-examples.jar from your cluster
-// and place it under the samples/ directory.
-// For example you might find the jar under: /usr/iop/current/hadoop-mapreduce-client
-jarFile = "samples/hadoop-mapreduce-examples.jar"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-jobDir = "/user/" + username + "/test"
-
-session = Hadoop.login( gateway, username, pass )
-
-println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
-println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
-
-putJar = Hdfs.put( session ).file( jarFile ).to( jobDir + "/lib/hadoop-mapreduce-examples.jar" ).later() {
-  println "Put " + jobDir + "/lib/hadoop-mapreduce-examples.jar: " + it.statusCode }
-
-session.waitFor( putJar )
-
-// Run teragen with 5 mappers. It will generate 500 records of 100 bytes each.
-jobId = Job.submitJava(session) \
-  .jar( jobDir + "/lib/hadoop-mapreduce-examples.jar" ) \
-  .app( "teragen" ) \
-  .arg( "-D").arg("mapred.map.tasks=5") \
-  .arg( "500" ) \
-  .input( jobDir + "/input_terasort" ) \
-  .now().jobId
-println "Submitted job: " + jobId
-
-println "Polling up to 60s for job completion..."
-done = false
-count = 0
-while( !done && count++ < 90 ) {
-  sleep( 1000 )
-  json = Job.queryStatus(session).jobId(jobId).now().string
-  done = JsonPath.read( json, "\$.status.jobComplete" )
-  print "."; System.out.flush();
-}
-println ""
-println "Job status: " + done
-
-text = Hdfs.ls( session ).dir( jobDir + "/input_terasort" ).now().string
-json = (new JsonSlurper()).parseText( text )
-println json.FileStatuses.FileStatus.pathSuffix
-
-println "Session closed: " + session.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHCatPig.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHCatPig.groovy b/gateway-release/home/samples/ExampleWebHCatPig.groovy
deleted file mode 100644
index 8699608..0000000
--- a/gateway-release/home/samples/ExampleWebHCatPig.groovy
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import com.jayway.jsonpath.JsonPath
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.job.Job
-
-import static java.util.concurrent.TimeUnit.SECONDS
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-jobDir = "/user/" + username + "/test"
-
-session = Hadoop.login( gateway, username, pass )
-
-println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
-println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
-
-id_pig = '''
-A = load 'test/input/$filename' using PigStorage(':');
-B = foreach A generate $0 as id;
-dump B;
-'''
-
-fake_passwd = '''ctdean:Chris Dean:secret
-pauls:Paul Stolorz:good
-carmas:Carlos Armas:evil
-dra:Deirdre McClure:marvelous
-'''
-
-Hdfs.put(session).text( id_pig ).to( jobDir + "/input/id.pig" ).now()
-Hdfs.put(session).text( fake_passwd ).to( jobDir + "/input/passwd" ).now()
-
-jobId = Job.submitPig(session) \
-            .file("${jobDir}/input/id.pig") \
-            .arg("-v") \
-            .arg("-p").arg("filename=passwd") \
-            .statusDir("${jobDir}/output") \
-            .now().jobId
-
-println "Submitted job: " + jobId
-
-println "Polling up to 60s for job completion..."
-done = false
-count = 0
-while( !done && count++ < 60 ) {
-  sleep( 1000 )
-  json = Job.queryStatus(session).jobId(jobId).now().string
-  done = JsonPath.read( json, "\$.status.jobComplete" )
-  print "."; System.out.flush();
-}
-println ""
-println "Job status: " + done
-
-text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
-json = (new JsonSlurper()).parseText( text )
-println json.FileStatuses.FileStatus.pathSuffix
-
-println "Session closed: " + session.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHCatQueue.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHCatQueue.groovy b/gateway-release/home/samples/ExampleWebHCatQueue.groovy
deleted file mode 100644
index 2517db2..0000000
--- a/gateway-release/home/samples/ExampleWebHCatQueue.groovy
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.job.Job
-
-import static java.util.concurrent.TimeUnit.SECONDS
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-session = Hadoop.login( gateway, username, pass )
-
-println "Queue: " + Job.queryQueue( session ).now().string
-
-println "Session closed: " + session.shutdown( 10, SECONDS )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHCatSqoop.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHCatSqoop.groovy b/gateway-release/home/samples/ExampleWebHCatSqoop.groovy
deleted file mode 100644
index a5629e3..0000000
--- a/gateway-release/home/samples/ExampleWebHCatSqoop.groovy
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import com.jayway.jsonpath.JsonPath
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.job.Job
-
-import static java.util.concurrent.TimeUnit.SECONDS
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-jobDir = "/user/" + username + "/test"
-
-session = Hadoop.login( gateway, username, pass )
-
-println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
-println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
-
-// Define the options for the sqoop command.
-// This uses the publicly available Genome MySQL database.
-// If the database is unavailable, set up an alternate database and update the
-// db information below.
-db = [ driver:"com.mysql.jdbc.Driver", url:"jdbc:mysql://genome-mysql.cse.ucsc.edu/hg38", user:"genome", password:"", name:"hg38", table:"scBlastTab", split:"query" ]
-
-targetdir = jobDir + "/" + db.table
-
-sqoop_command = "import --driver ${db.driver} --connect ${db.url} --username ${db.user} --password ${db.password} --table ${db.table} --split-by ${db.split} --target-dir ${targetdir}"
-
-jobId = Job.submitSqoop(session) \
-            .command(sqoop_command) \
-            .statusDir("${jobDir}/output") \
-            .now().jobId
-
-println "Submitted job: " + jobId
-
-println "Polling up to 60s for job completion..."
-done = false
-count = 0
-while( !done && count++ < 180 ) {
-  sleep( 1000 )
-  json = Job.queryStatus(session).jobId(jobId).now().string
-  done = JsonPath.read( json, "\$.status.jobComplete" )
-  print "."; System.out.flush();
-}
-println ""
-println "Job status: " + done
-
-text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
-json = (new JsonSlurper()).parseText( text )
-println json.FileStatuses.FileStatus.pathSuffix
-
-println "Content of stderr:"
-println Hdfs.get( session ).from( jobDir + "/output/stderr" ).now().string
-
-println "Session closed: " + session.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHdfsLs.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHdfsLs.groovy b/gateway-release/home/samples/ExampleWebHdfsLs.groovy
deleted file mode 100644
index d631f33..0000000
--- a/gateway-release/home/samples/ExampleWebHdfsLs.groovy
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import groovy.json.JsonSlurper
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-session = Hadoop.login( gateway, username, pass )
-
-text = Hdfs.ls( session ).dir( "/" ).now().string
-json = (new JsonSlurper()).parseText( text )
-println json.FileStatuses.FileStatus.pathSuffix
-session.shutdown()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/ExampleWebHdfsPutGet.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/ExampleWebHdfsPutGet.groovy b/gateway-release/home/samples/ExampleWebHdfsPutGet.groovy
deleted file mode 100644
index 68b5f67..0000000
--- a/gateway-release/home/samples/ExampleWebHdfsPutGet.groovy
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.hdfs.Hdfs
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-dataFile = "README"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-dataDir = "/user/" + username + "/example"
-
-session = Hadoop.login( gateway, username, pass )
-
-Hdfs.rm( session ).file( dataDir ).recursive().now()
-
-Hdfs.put( session ).file( dataFile ).to( dataDir + "/" + dataFile ).now()
-
-Hdfs.put( session ).file( dataFile ).to( dataDir + "/" + dataFile ).overwrite( true ).permission( 777 ).now()
-
-println Hdfs.get( session ).from( dataDir + "/" + dataFile ).now().string
-
-session.shutdown()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/README b/gateway-release/home/samples/README
deleted file mode 100644
index 3e59836..0000000
--- a/gateway-release/home/samples/README
+++ /dev/null
@@ -1,13 +0,0 @@
-*********************************************************************
-Samples README
-*********************************************************************
-The purpose of the samples within this directory is to demonstrate the capabilities
-of the Apache Knox Gateway to provide access to the numerous APIs that are available
-from the service components of a Hadoop cluster.
-
-Depending on exactly how your Knox installation was done, there will be some number of
-steps required in order to fully install and configure the samples for use in your environment.
-
-Please see the following section within the Apache Knox Users Guide: 
-
-http://knox.apache.org/books/knox-0-5-0/knox-0-5-0.html#Gateway+Samples
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/SampleComplexCommand.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/SampleComplexCommand.groovy b/gateway-release/home/samples/SampleComplexCommand.groovy
deleted file mode 100644
index 1abfa4a..0000000
--- a/gateway-release/home/samples/SampleComplexCommand.groovy
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import com.jayway.jsonpath.JsonPath
-import org.apache.hadoop.gateway.shell.AbstractRequest
-import org.apache.hadoop.gateway.shell.BasicResponse
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.http.HttpResponse
-import org.apache.http.client.methods.HttpGet
-import org.apache.http.client.utils.URIBuilder
-
-import java.util.concurrent.Callable
-
-class SampleComplexCommand {
-
-  static class Request extends AbstractRequest<Response> {
-
-    Request( Hadoop hadoop ) {
-      super( hadoop )
-    }
-
-    private String param;
-    Request param( String param ) {
-      this.param = param;
-      return this;
-    }
-
-    @Override
-    protected Callable<Response> callable() {
-      return new Callable<Response>() {
-        @Override
-        Response call() {
-          URIBuilder uri = uri( SampleService.PATH, param )
-          addQueryParam( uri, "op", "LISTSTATUS" )
-          HttpGet get = new HttpGet( uri.build() )
-          return new Response( execute( get ) )
-        }
-      }
-    }
-
-  }
-
-  static class Response extends BasicResponse {
-
-    Response(HttpResponse response) {
-      super(response)
-    }
-
-    public List<String> getNames() {
-      return JsonPath.read( string, "\$.FileStatuses.FileStatus[*].pathSuffix" )
-    }
-
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/SampleScript.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/SampleScript.groovy b/gateway-release/home/samples/SampleScript.groovy
deleted file mode 100644
index 219ea85..0000000
--- a/gateway-release/home/samples/SampleScript.groovy
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.hadoop.gateway.shell.Credentials
-
-gateway = "https://localhost:8443/gateway/sandbox"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-username = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-session = Hadoop.login( gateway, username, pass )
-
-println "JSON=" + SampleService.simple( session ).param( "/tmp" ).now().string
-
-println "Names=" + SampleService.complex( session ).param( "/tmp" ).now().names
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/SampleService.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/SampleService.groovy b/gateway-release/home/samples/SampleService.groovy
deleted file mode 100644
index 3b1345c..0000000
--- a/gateway-release/home/samples/SampleService.groovy
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one
-* or more contributor license agreements.  See the NOTICE file
-* distributed with this work for additional information
-* regarding copyright ownership.  The ASF licenses this file
-* to you under the Apache License, Version 2.0 (the
-* "License"); you may not use this file except in compliance
-* with the License.  You may obtain a copy of the License at
-*
-*     http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-import org.apache.hadoop.gateway.shell.Hadoop
-
-class SampleService {
-
-  static String PATH = "/webhdfs/v1"
-
-  static SampleSimpleCommand simple( Hadoop hadoop ) {
-    return new SampleSimpleCommand( hadoop )
-  }
-
-  static SampleComplexCommand.Request complex( Hadoop hadoop ) {
-    return new SampleComplexCommand.Request( hadoop )
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/SampleSimpleCommand.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/SampleSimpleCommand.groovy b/gateway-release/home/samples/SampleSimpleCommand.groovy
deleted file mode 100644
index 600113d..0000000
--- a/gateway-release/home/samples/SampleSimpleCommand.groovy
+++ /dev/null
@@ -1,51 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import org.apache.hadoop.gateway.shell.AbstractRequest
-import org.apache.hadoop.gateway.shell.BasicResponse
-import org.apache.hadoop.gateway.shell.Hadoop
-import org.apache.http.client.methods.HttpGet
-import org.apache.http.client.utils.URIBuilder
-
-import java.util.concurrent.Callable
-
-class SampleSimpleCommand extends AbstractRequest<BasicResponse> {
-
-  SampleSimpleCommand( Hadoop hadoop ) {
-    super( hadoop )
-  }
-
-  private String param
-  SampleSimpleCommand param( String param ) {
-    this.param = param
-    return this
-  }
-
-  @Override
-  protected Callable<BasicResponse> callable() {
-    return new Callable<BasicResponse>() {
-      @Override
-      BasicResponse call() {
-        URIBuilder uri = uri( SampleService.PATH, param )
-        addQueryParam( uri, "op", "LISTSTATUS" )
-        HttpGet get = new HttpGet( uri.build() )
-        return new BasicResponse( execute( get ) )
-      }
-    }
-  }
-
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy b/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy
deleted file mode 100644
index 0e465aa..0000000
--- a/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy
+++ /dev/null
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.sql.DriverManager
-import org.apache.hadoop.gateway.shell.Credentials
-
-gatewayHost = "localhost";
-gatewayPort = 8443;
-trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
-trustStorePassword = "knoxsecret";
-contextPath = "gateway/sandbox-with-knox-inside/hive";
-connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-user = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-// Load Hive JDBC Driver
-Class.forName( "org.apache.hive.jdbc.HiveDriver" );
-
-// Configure JDBC connection
-connection = DriverManager.getConnection( connectionString, user, pass );
-
-statement = connection.createStatement();
-
-// Disable Hive authorization - This can be omitted if Hive authorization is configured properly
-statement.execute( "set hive.security.authorization.enabled=false" );
-
-// Drop sample table to ensure repeatability
-statement.execute( "DROP TABLE logs" );
-
-// Create sample table
-statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
-
-// Load data into Hive from file /tmp/sample.log which is placed on the local file system
-statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
-
-resultSet = statement.executeQuery( "SELECT * FROM logs" );
-
-while ( resultSet.next() ) {
-  System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
-}
-
-resultSet.close();
-statement.close();
-connection.close();

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README b/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README
deleted file mode 100644
index 8ff61e4..0000000
--- a/gateway-release/home/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README
+++ /dev/null
@@ -1,2 +0,0 @@
-This sample assumes that Knox is deployed on Sandbox.
-Use sandbox-with-knox-inside.xml as deployment configuration.

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy b/gateway-release/home/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy
deleted file mode 100644
index bd34edb..0000000
--- a/gateway-release/home/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import java.sql.DriverManager
-import org.apache.hadoop.gateway.shell.Credentials
-
-gatewayHost = "localhost";
-gatewayPort = 8443;
-trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
-trustStorePassword = "knoxsecret";
-contextPath = "gateway/sandbox/hive";
-connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
-
-gateway = "https://localhost:8443/gateway/sandbox"
-tableName = "test_table"
-
-credentials = new Credentials()
-credentials.add("ClearInput", "Enter username: ", "user")
-                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
-credentials.collect()
-
-user = credentials.get("user").string()
-pass = credentials.get("pass").string()
-
-// Load Hive JDBC Driver
-Class.forName( "org.apache.hive.jdbc.HiveDriver" );
-
-// Configure JDBC connection
-connection = DriverManager.getConnection( connectionString, user, pass );
-
-statement = connection.createStatement();
-
-// Disable Hive authorization - This can be omitted if Hive authorization is configured properly
-statement.execute( "set hive.security.authorization.enabled=false" );
-
-// Drop sample table to ensure repeatability
-statement.execute( "DROP TABLE logs" );
-
-// Create sample table
-statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
-
-// Load data into Hive from file /tmp/sample.log which is placed on the local file system
-statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
-
-resultSet = statement.executeQuery( "SELECT * FROM logs" );
-
-while ( resultSet.next() ) {
-  System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
-}
-
-resultSet.close();
-statement.close();
-connection.close();

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/groovy/jdbc/sandbox/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/groovy/jdbc/sandbox/README b/gateway-release/home/samples/hive/groovy/jdbc/sandbox/README
deleted file mode 100644
index f31ab55..0000000
--- a/gateway-release/home/samples/hive/groovy/jdbc/sandbox/README
+++ /dev/null
@@ -1 +0,0 @@
-This sample assumes that Sandbox is running on the host where Knox is deployed.

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java b/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java
deleted file mode 100644
index 2bcf4d0..0000000
--- a/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.hadoop.gateway.shell.Credentials;
-
-public class HiveJDBCSample {
-
-  public static void main( String[] args ) {
-    Connection connection = null;
-    Statement statement = null;
-    ResultSet resultSet = null;
-
-    try {
-      String gatewayHost = "localhost";
-      int gatewayPort = 8443;
-      String trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
-      String trustStorePassword = "knoxsecret";
-      String contextPath = "gateway/sandbox-with-knox-inside/hive";
-      String connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
-
-      Credentials credentials = new Credentials();
-      credentials.add("ClearInput", "Enter username: ", "user");
-                     .add("HiddenInput", "Enter pas" + "sword: ", "pass");
-      credentials.collect();
-
-      String username = credentials.get("user").string();
-      String pass = credentials.get("pass").string();
-
-      // Load Hive JDBC Driver
-      Class.forName( "org.apache.hive.jdbc.HiveDriver" );
-
-      // Configure JDBC connection
-      connection = DriverManager.getConnection( connectionString, username, pass );
-
-      statement = connection.createStatement();
-
-      // Disable Hive authorization - This can be omitted if Hive authorization is configured properly
-      statement.execute( "set hive.security.authorization.enabled=false" );
-
-      // Drop sample table to ensure repeatability
-      statement.execute( "DROP TABLE logs" );
-
-      // Create sample table
-      statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
-
-      // Load data into Hive from file /tmp/sample.log which is placed on the local file system
-      statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
-
-      resultSet = statement.executeQuery( "SELECT * FROM logs" );
-
-      while ( resultSet.next() ) {
-        System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
-      }
-    } catch ( ClassNotFoundException ex ) {
-      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-    } catch ( SQLException ex ) {
-      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-    } finally {
-      if ( resultSet != null ) {
-        try {
-          resultSet.close();
-        } catch ( SQLException ex ) {
-          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-        }
-      }
-      if ( statement != null ) {
-        try {
-          statement.close();
-        } catch ( SQLException ex ) {
-          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-        }
-      }
-      if ( connection != null ) {
-        try {
-          connection.close();
-        } catch ( SQLException ex ) {
-          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-        }
-      }
-    }
-  }
-}

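With the default values above, the String.format call resolves to a connection string along the following lines (host, port, truststore path and password are the sample's placeholders and must match the actual deployment):

jdbc:hive2://localhost:8443/;ssl=true;sslTrustStore=/usr/lib/knox/data/security/keystores/gateway.jks;trustStorePassword=knoxsecret?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/gateway/sandbox-with-knox-inside/hive

The parameters after the '?' switch the Hive JDBC driver to HTTP transport and point it at the Knox context path, which is what routes the Thrift traffic through the gateway.
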
http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/README b/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/README
deleted file mode 100644
index 8ff61e4..0000000
--- a/gateway-release/home/samples/hive/java/jdbc/sandbox-with-knox-inside/README
+++ /dev/null
@@ -1,2 +0,0 @@
-This sample assumes that Knox is deployed on Sandbox.
-Use sandbox-with-knox-inside.xml as deployment configuration.

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/java/jdbc/sandbox/HiveJDBCSample.java
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/java/jdbc/sandbox/HiveJDBCSample.java b/gateway-release/home/samples/hive/java/jdbc/sandbox/HiveJDBCSample.java
deleted file mode 100644
index ffa7e58..0000000
--- a/gateway-release/home/samples/hive/java/jdbc/sandbox/HiveJDBCSample.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.hadoop.gateway.shell.Credentials;
-
-public class HiveJDBCSample {
-
-  public static void main( String[] args ) {
-    Connection connection = null;
-    Statement statement = null;
-    ResultSet resultSet = null;
-
-    try {
-      String gatewayHost = "localhost";
-      int gatewayPort = 8443;
-      String trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
-      String trustStorePassword = "knoxsecret";
-      String contextPath = "gateway/sandbox/hive";
-      String connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
-
-      Credentials credentials = new Credentials();
-      credentials.add("ClearInput", "Enter username: ", "user")
-                 .add("HiddenInput", "Enter password: ", "pass");
-      credentials.collect();
-
-      String username = credentials.get("user").string();
-      String pass = credentials.get("pass").string();
-
-      // Load Hive JDBC Driver
-      Class.forName( "org.apache.hive.jdbc.HiveDriver" );
-
-      // Configure JDBC connection
-      connection = DriverManager.getConnection( connectionString, username, pass );
-
-      statement = connection.createStatement();
-
-      // Disable Hive authorization - this can be omitted if Hive authorization is configured properly
-      statement.execute( "set hive.security.authorization.enabled=false" );
-
-      // Drop sample table to ensure repeatability
-      statement.execute( "DROP TABLE logs" );
-
-      // Create sample table
-      statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
-
-      // Load data into Hive from the file /tmp/sample.log on the local file system
-      statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
-
-      resultSet = statement.executeQuery( "SELECT * FROM logs" );
-
-      while ( resultSet.next() ) {
-        System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
-      }
-    } catch ( ClassNotFoundException ex ) {
-      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-    } catch ( SQLException ex ) {
-      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-    } finally {
-      if ( resultSet != null ) {
-        try {
-          resultSet.close();
-        } catch ( SQLException ex ) {
-          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-        }
-      }
-      if ( statement != null ) {
-        try {
-          statement.close();
-        } catch ( SQLException ex ) {
-          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-        }
-      }
-      if ( connection != null ) {
-        try {
-          connection.close();
-        } catch ( SQLException ex ) {
-          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
-        }
-      }
-    }
-  }
-}

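For comparison, the explicit finally-block cleanup in both samples predates try-with-resources; on a Java 7 or later runtime the same flow can be sketched more compactly. This is a minimal sketch rather than the shipped sample: the "guest"/"guest-password" credentials are placeholders and the DDL/LOAD steps are elided.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJDBCSampleTryWithResources {

  public static void main( String[] args ) throws Exception {
    String connectionString = "jdbc:hive2://localhost:8443/;ssl=true;"
        + "sslTrustStore=/usr/lib/knox/data/security/keystores/gateway.jks;"
        + "trustStorePassword=knoxsecret"
        + "?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/gateway/sandbox/hive";

    // Load Hive JDBC Driver
    Class.forName( "org.apache.hive.jdbc.HiveDriver" );

    // Connection, Statement and ResultSet all implement AutoCloseable, so
    // try-with-resources closes them in reverse declaration order even on failure.
    try ( Connection connection = DriverManager.getConnection( connectionString, "guest", "guest-password" );
          Statement statement = connection.createStatement();
          ResultSet resultSet = statement.executeQuery( "SELECT * FROM logs" ) ) {
      while ( resultSet.next() ) {
        System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
      }
    }
  }
}
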
http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/hive/java/jdbc/sandbox/README
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/hive/java/jdbc/sandbox/README b/gateway-release/home/samples/hive/java/jdbc/sandbox/README
deleted file mode 100644
index f31ab55..0000000
--- a/gateway-release/home/samples/hive/java/jdbc/sandbox/README
+++ /dev/null
@@ -1 +0,0 @@
-This sample assumes that Sandbox is running on the host where Knox is deployed.