You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@knox.apache.org by su...@apache.org on 2017/02/10 19:43:37 UTC

[4/6] knox git commit: KNOX-865 Added a release module and refactored samples

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/home/samples/sandbox-with-knox-inside.xml
----------------------------------------------------------------------
diff --git a/gateway-release/home/samples/sandbox-with-knox-inside.xml b/gateway-release/home/samples/sandbox-with-knox-inside.xml
deleted file mode 100644
index 9c92ead..0000000
--- a/gateway-release/home/samples/sandbox-with-knox-inside.xml
+++ /dev/null
@@ -1,96 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!--
-  Licensed to the Apache Software Foundation (ASF) under one or more
-  contributor license agreements.  See the NOTICE file distributed with
-  this work for additional information regarding copyright ownership.
-  The ASF licenses this file to You under the Apache License, Version 2.0
-  (the "License"); you may not use this file except in compliance with
-  the License.  You may obtain a copy of the License at
-
-      http://www.apache.org/licenses/LICENSE-2.0
-
-  Unless required by applicable law or agreed to in writing, software
-  distributed under the License is distributed on an "AS IS" BASIS,
-  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-  See the License for the specific language governing permissions and
-  limitations under the License.
--->
-<topology>
-
-    <gateway>
-        <provider>
-            <role>authentication</role>
-            <name>ShiroProvider</name>
-            <enabled>true</enabled>
-            <param>
-                <name>main.ldapRealm</name>
-                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapRealm</value>
-            </param>
-            <param>
-                <name>main.ldapContextFactory</name>
-                <value>org.apache.hadoop.gateway.shirorealm.KnoxLdapContextFactory</value>
-            </param>
-            <param>
-                <name>main.ldapRealm.contextFactory</name>
-                <value>$ldapContextFactory</value>
-            </param>
-            <param>
-                <name>main.ldapRealm.userDnTemplate</name>
-                <value>uid={0},ou=people,dc=hadoop,dc=apache,dc=org</value>
-            </param>
-            <param>
-                <name>main.ldapRealm.contextFactory.url</name>
-                <value>ldap://localhost:33389</value>
-            </param>
-            <param>
-                <name>main.ldapRealm.contextFactory.authenticationMechanism</name>
-                <value>simple</value>
-            </param>
-            <param>
-                <name>urls./**</name>
-                <value>authcBasic</value>
-            </param>
-        </provider>
-        <provider>
-            <role>identity-assertion</role>
-            <name>Default</name>
-            <enabled>true</enabled>
-        </provider>
-        <provider>
-            <role>hostmap</role>
-            <name>static</name>
-            <enabled>true</enabled>
-            <param><name>localhost</name><value>sandbox,sandbox.hortonworks.com</value></param>
-        </provider>
-    </gateway>
-
-    <service>
-        <role>NAMENODE</role>
-        <url>hdfs://sandbox:8020</url>
-    </service>
-    <service>
-        <role>JOBTRACKER</role>
-        <url>rpc://sandbox:8050</url>
-    </service>
-    <service>
-        <role>WEBHDFS</role>
-        <url>http://sandbox:50070/webhdfs</url>
-    </service>
-    <service>
-        <role>WEBHCAT</role>
-        <url>http://sandbox:50111/templeton</url>
-    </service>
-    <service>
-        <role>OOZIE</role>
-        <url>http://sandbox:11000/oozie</url>
-    </service>
-    <service>
-        <role>STARGATE</role>
-        <url>http://sandbox:60080</url>
-    </service>
-    <service>
-        <role>HIVE</role>
-        <url>http://sandbox:10001/cliservice</url>
-    </service>
-
-</topology>

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-release/pom.xml b/gateway-release/pom.xml
index 09c38fd..57003fc 100644
--- a/gateway-release/pom.xml
+++ b/gateway-release/pom.xml
@@ -130,6 +130,14 @@
                                             <outputDirectory>${project.build.directory}</outputDirectory>
                                             <includes>applications/**/*</includes>
                                         </artifactItem>
+                                        <artifactItem>
+                                            <groupId>${gateway-group}</groupId>
+                                            <artifactId>gateway-shell-samples</artifactId>
+                                            <type>jar</type>
+                                            <overWrite>false</overWrite>
+                                            <outputDirectory>${project.build.directory}</outputDirectory>
+                                            <includes>samples/**/*</includes>
+                                        </artifactItem>
                                     </artifactItems>
                                 </configuration>
                             </execution>
@@ -309,6 +317,10 @@
             <groupId>${gateway-group}</groupId>
             <artifactId>gateway-applications</artifactId>
         </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-shell-samples</artifactId>
+        </dependency>
 
         <dependency>
             <groupId>junit</groupId>

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-release/src/assembly.xml
----------------------------------------------------------------------
diff --git a/gateway-release/src/assembly.xml b/gateway-release/src/assembly.xml
index 16e9853..7ca5fed 100644
--- a/gateway-release/src/assembly.xml
+++ b/gateway-release/src/assembly.xml
@@ -66,6 +66,13 @@
             </includes>
         </fileSet>
         <fileSet>
+            <directory>${project.build.directory}/samples/</directory>
+            <outputDirectory>samples</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+        <fileSet>
             <directory>${project.build.directory}/applications</directory>
             <outputDirectory>data/applications</outputDirectory>
             <includes>

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-release/home/bin/knox-env.sh
----------------------------------------------------------------------
diff --git a/gateway-shell-release/home/bin/knox-env.sh b/gateway-shell-release/home/bin/knox-env.sh
new file mode 100644
index 0000000..1876a5d
--- /dev/null
+++ b/gateway-shell-release/home/bin/knox-env.sh
@@ -0,0 +1,68 @@
+#!/usr/bin/env bash
+
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+ENV_PID_DIR=""
+
+JAVA_VERSION_PATTERNS=( "1.6.0_31/bin/java$" "1.6.0_.*/bin/java$" "1.6.0.*/bin/java$" "1.6\..*/bin/java$" "/bin/java$" )
+
+function findJava() {
+  # Check to make sure any existing JAVA var is valid.
+  if [ "$JAVA" != "" ]; then
+    if [ ! -x "$JAVA" ]; then
+      JAVA=""
+    fi
+  fi
+  
+  # Try to use JAVA_HOME to find java.
+  if [ "$JAVA" == "" ]; then
+    if [ "$JAVA_HOME" != "" ]; then
+      JAVA=$JAVA_HOME/bin/java
+      if [ ! -x "$JAVA" ]; then
+        JAVA=""
+      fi
+    fi
+  fi
+
+  # Try to find java on PATH.
+  if [ "$JAVA" == "" ]; then
+    JAVA=`which java 2>/dev/null`
+    if [ ! -x "$JAVA" ]; then
+      JAVA=""
+    fi
+  fi
+
+  # Use the search patterns to find java.
+  if [ "$JAVA" == "" ]; then
+    for pattern in "${JAVA_VERSION_PATTERNS[@]}"; do
+      JAVA=( $(find /usr -executable -name java -print 2> /dev/null | grep "$pattern" | head -n 1 ) )
+      if [ -x "$JAVA" ]; then
+        break
+      else
+        JAVA=""
+      fi
+    done
+  fi
+}
+
+findJava
+
+  if [[ -z $JAVA ]]; then
+    echo "Warning: JAVA is not set and could not be found." 1>&2
+  fi
+

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-release/home/bin/knoxshell.cmd
----------------------------------------------------------------------
diff --git a/gateway-shell-release/home/bin/knoxshell.cmd b/gateway-shell-release/home/bin/knoxshell.cmd
new file mode 100644
index 0000000..2202198
--- /dev/null
+++ b/gateway-shell-release/home/bin/knoxshell.cmd
@@ -0,0 +1,35 @@
+@echo off
+REM  Licensed to the Apache Software Foundation (ASF) under one or more
+REM  contributor license agreements.  See the NOTICE file distributed with
+REM  this work for additional information regarding copyright ownership.
+REM  The ASF licenses this file to You under the Apache License, Version 2.0
+REM  (the "License"); you may not use this file except in compliance with
+REM  the License.  You may obtain a copy of the License at
+REM
+REM      http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM  Unless required by applicable law or agreed to in writing, software
+REM  distributed under the License is distributed on an "AS IS" BASIS,
+REM  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+REM  See the License for the specific language governing permissions and
+REM  limitations under the License.
+
+REM Start/stop script location
+set APP_BIN_DIR=%~dp0
+
+REM The app's jar name
+set APP_JAR=%APP_BIN_DIR%knoxshell.jar
+
+if not exist "%JAVA_HOME%"\bin\java.exe (
+  echo Error: JAVA_HOME is incorrectly set.
+  exit /B 1
+)
+set JAVA=%JAVA_HOME%\bin\java
+
+"%JAVA%" -jar "%APP_JAR%" %*
+
+if not %ERRORLEVEL% == 0 (
+  exit /B %ERRORLEVEL%
+)
+
+exit /B 0

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-release/home/bin/knoxshell.sh
----------------------------------------------------------------------
diff --git a/gateway-shell-release/home/bin/knoxshell.sh b/gateway-shell-release/home/bin/knoxshell.sh
new file mode 100755
index 0000000..05015e5
--- /dev/null
+++ b/gateway-shell-release/home/bin/knoxshell.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env bash
+
+#
+#  Licensed to the Apache Software Foundation (ASF) under one or more
+#  contributor license agreements.  See the NOTICE file distributed with
+#  this work for additional information regarding copyright ownership.
+#  The ASF licenses this file to You under the Apache License, Version 2.0
+#  (the "License"); you may not use this file except in compliance with
+#  the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+#  limitations under the License.
+#
+
+# The app's label
+APP_LABEL=KnoxShell
+
+# The app's name
+APP_NAME=knoxshell
+
+# Start/stop script location
+APP_BIN_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+# The app's jar name
+APP_JAR="$APP_BIN_DIR/knoxshell.jar"
+
+# The app's home dir
+APP_HOME_DIR=`dirname $APP_BIN_DIR`
+
+# The app's conf dir
+APP_CONF_DIR="$APP_HOME_DIR/conf"
+
+# The app's log dir
+APP_LOG_DIR="$APP_HOME_DIR/logs"
+
+# The app's logging options
+APP_LOG_OPTS=""
+
+# The app's memory options
+APP_MEM_OPTS=""
+
+# The app's debugging options
+APP_DBG_OPTS=""
+
+# Name of LOG/OUT/ERR file
+APP_OUT_FILE="$APP_LOG_DIR/$APP_NAME.out"
+APP_ERR_FILE="$APP_LOG_DIR/$APP_NAME.err"
+
+# Setup the common environment
+. $APP_BIN_DIR/knox-env.sh
+
+function main {
+   #printf "Starting $APP_LABEL \n"
+   #printf "$@"
+   
+   $JAVA $APP_MEM_OPTS $APP_DBG_OPTS $APP_LOG_OPTS -jar $APP_JAR $@ || exit 1
+
+   return 0
+}
+
+function printHelp {
+   $JAVA -jar $APP_JAR -help
+   return 0
+}
+
+#Starting main
+main $@

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-release/home/conf/knoxshell-log4j.properties
----------------------------------------------------------------------
diff --git a/gateway-shell-release/home/conf/knoxshell-log4j.properties b/gateway-shell-release/home/conf/knoxshell-log4j.properties
new file mode 100644
index 0000000..c7fab24
--- /dev/null
+++ b/gateway-shell-release/home/conf/knoxshell-log4j.properties
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+app.log.dir=${launcher.dir}/../logs
+app.log.file=${launcher.name}.log
+
+log4j.rootLogger=ERROR, drfa
+
+log4j.logger.org.apache.hadoop.gateway=INFO
+#log4j.logger.org.apache.hadoop.gateway=DEBUG
+
+log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+
+log4j.appender.drfa=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.drfa.File=${app.log.dir}/${app.log.file}
+log4j.appender.drfa.DatePattern=.yyyy-MM-dd
+log4j.appender.drfa.layout=org.apache.log4j.PatternLayout
+log4j.appender.drfa.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-release/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-shell-release/pom.xml b/gateway-shell-release/pom.xml
new file mode 100644
index 0000000..8a4b55e
--- /dev/null
+++ b/gateway-shell-release/pom.xml
@@ -0,0 +1,180 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+    <parent>
+        <groupId>org.apache.knox</groupId>
+        <artifactId>gateway</artifactId>
+        <version>0.12.0-SNAPSHOT</version>
+    </parent>
+    <artifactId>gateway-shell-release</artifactId>
+
+    <name>gateway-shell-release</name>
+    <description>The gateway shell binary release packaging.</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>3.0.0</version>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <transformers>
+                                <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <mainClass>org.apache.hadoop.gateway.launcher.Launcher</mainClass>
+                                </transformer>
+                            </transformers>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+    <profiles>
+        <profile>
+            <id>package</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <inherited>false</inherited>
+                        <artifactId>maven-assembly-plugin</artifactId>
+                        <version>2.4</version>
+                        <executions>
+                            <execution>
+                                <id>binary</id>
+                                <phase>package</phase>
+                                <goals><goal>single</goal></goals>
+                                <configuration>
+                                    <finalName>knoxshell-${gateway-version}</finalName>
+                                    <outputDirectory>../target/${gateway-version}</outputDirectory>
+                                    <appendAssemblyId>false</appendAssemblyId>
+                                    <descriptors>
+                                        <descriptor>src/assembly.xml</descriptor>
+                                    </descriptors>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <artifactId>maven-antrun-plugin</artifactId>
+                        <executions>
+                            <execution>
+                                <phase>package</phase>
+                                <goals><goal>run</goal></goals>
+                                <configuration>
+                                    <tasks>
+                                        <checksum algorithm="MD5" fileext=".md5">
+                                            <fileset dir="../target/${gateway-version}">
+                                                <include name="knoxshell-${gateway-version}.zip" />
+                                                <include name="knoxshell-${gateway-version}.tar.gz" />
+                                            </fileset>
+                                        </checksum>
+                                        <checksum algorithm="SHA1" fileext=".sha">
+                                            <fileset dir="../target/${gateway-version}">
+                                                <include name="knoxshell-${gateway-version}.zip" />
+                                                <include name="knoxshell-${gateway-version}.tar.gz" />
+                                            </fileset>
+                                        </checksum>
+                                    </tasks>
+                                </configuration>
+                            </execution>
+                        </executions>
+                        <dependencies>
+                            <dependency>
+                                <groupId>org.apache.ant</groupId>
+                                <artifactId>ant-nodeps</artifactId>
+                                <version>1.8.1</version>
+                            </dependency>
+                        </dependencies>
+                    </plugin>
+                    <plugin>
+                        <!-- Using the dependency plugin to grab a dependency jar (gateway-shell-samples) and
+                        unpack some resources into the build directory so that it can be included in the assembly.
+                        The phase 'generate-resources' is chosen since it is not only the closest fit to the description
+                        of the action, but more importantly, it is a phase that is guaranteed to occur before the
+                        assembly which is tied to 'package'.
+                        -->
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-dependency-plugin</artifactId>
+                        <version>2.8</version>
+                        <executions>
+                            <execution>
+                                <id>unpack-services</id>
+                                <phase>generate-resources</phase>
+                                <goals>
+                                    <goal>unpack</goal>
+                                </goals>
+                                <configuration>
+                                    <artifactItems>
+                                        <artifactItem>
+                                            <groupId>${gateway-group}</groupId>
+                                            <artifactId>gateway-shell-samples</artifactId>
+                                            <type>jar</type>
+                                            <overWrite>false</overWrite>
+                                            <outputDirectory>${project.build.directory}</outputDirectory>
+                                            <includes>samples/**/*</includes>
+                                        </artifactItem>
+                                    </artifactItems>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+
+    <dependencies>
+
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-shell</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-shell-launcher</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>${gateway-group}</groupId>
+            <artifactId>gateway-shell-samples</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+
+    </dependencies>
+
+</project>

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-release/src/assembly.xml
----------------------------------------------------------------------
diff --git a/gateway-shell-release/src/assembly.xml b/gateway-shell-release/src/assembly.xml
new file mode 100644
index 0000000..43e57f5
--- /dev/null
+++ b/gateway-shell-release/src/assembly.xml
@@ -0,0 +1,82 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<assembly>
+    <id>bin</id>
+    <formats>
+        <format>zip</format>
+        <format>tar.gz</format>
+    </formats>
+    <fileSets>
+        <fileSet>
+            <directory>home</directory>
+            <outputDirectory></outputDirectory>
+            <includes>
+                <include>**</include>
+            </includes>
+            <excludes>
+                <exclude>**/*.sh</exclude>
+                <exclude>**/.idea/**</exclude>
+                <exclude>**/*.iml</exclude>
+                <exclude>**/.project</exclude>
+                <exclude>**/.settings/**</exclude>
+            </excludes>
+            <fileMode>0644</fileMode>
+            <directoryMode>0744</directoryMode>
+        </fileSet>
+        <fileSet>
+            <directory>home</directory>
+            <outputDirectory></outputDirectory>
+            <includes>
+                <include>**/*.sh</include>
+            </includes>
+            <fileMode>0755</fileMode>
+        </fileSet>
+        <fileSet>
+            <directory>..</directory>
+            <outputDirectory></outputDirectory>
+            <includes>
+                <include>LICENSE</include>
+                <include>NOTICE</include>
+            </includes>
+            <fileMode>0444</fileMode>
+        </fileSet>
+        <fileSet>
+            <directory>${project.build.directory}/samples/</directory>
+            <outputDirectory>samples</outputDirectory>
+            <includes>
+                <include>**/*</include>
+            </includes>
+        </fileSet>
+    </fileSets>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>bin</outputDirectory>
+            <outputFileNameMapping>knoxshell.jar</outputFileNameMapping>
+            <includes>
+                <include>${gateway-group}:gateway-shell-release</include>
+            </includes>
+        </dependencySet>
+        <dependencySet>
+            <outputDirectory>samples</outputDirectory>
+            <outputFileNameMapping>hadoop-examples.jar</outputFileNameMapping>
+            <includes>
+                <include>${gateway-group}:hadoop-examples</include>
+            </includes>
+        </dependencySet>
+    </dependencySets>
+</assembly>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/pom.xml
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/pom.xml b/gateway-shell-samples/pom.xml
new file mode 100644
index 0000000..c282e8f
--- /dev/null
+++ b/gateway-shell-samples/pom.xml
@@ -0,0 +1,48 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>gateway</artifactId>
+        <groupId>org.apache.knox</groupId>
+        <version>0.12.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>gateway-shell-samples</artifactId>
+    <name>gateway-shell-samples</name>
+    <description>Sample scripts to demonstrate various shell capabilities</description>
+
+    <licenses>
+        <license>
+            <name>The Apache Software License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleHBase.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleHBase.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleHBase.groovy
new file mode 100644
index 0000000..7d20aca
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleHBase.groovy
@@ -0,0 +1,181 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.gateway.shell.hbase
+
+import org.apache.hadoop.gateway.shell.Hadoop
+
+import static java.util.concurrent.TimeUnit.SECONDS
+
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+tableName = "test_table"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+session = Hadoop.login(gateway, username, pass)
+
+println "System version : " + HBase.session(session).systemVersion().now().string
+
+println "Cluster version : " + HBase.session(session).clusterVersion().now().string
+
+println "Status : " + HBase.session(session).status().now().string
+
+println "Creating table '" + tableName + "'..."
+
+HBase.session(session).table(tableName).create()  \
+    .attribute("tb_attr1", "value1")  \
+    .attribute("tb_attr2", "value2")  \
+    .family("family1")  \
+        .attribute("fm_attr1", "value3")  \
+        .attribute("fm_attr2", "value4")  \
+    .endFamilyDef()  \
+    .family("family2")  \
+    .family("family3")  \
+    .endFamilyDef()  \
+    .attribute("tb_attr3", "value5")  \
+    .now()
+
+println "Done"
+
+println "Table List : " + HBase.session(session).table().list().now().string
+
+println "Schema for table '" + tableName + "' : " + HBase.session(session)  \
+    .table(tableName)  \
+    .schema()  \
+    .now().string
+
+println "Updating schema of table '" + tableName + "'..."
+
+HBase.session(session).table(tableName).update()  \
+    .family("family1")  \
+        .attribute("fm_attr1", "new_value3")  \
+    .endFamilyDef()  \
+    .family("family4")  \
+        .attribute("fm_attr3", "value6")  \
+    .endFamilyDef()  \
+    .now()
+
+println "Done"
+
+println "Schema for table '" + tableName + "' : " + HBase.session(session)  \
+    .table(tableName)  \
+    .schema()  \
+    .now().string
+
+println "Inserting data into table..."
+
+HBase.session(session).table(tableName).row("row_id_1").store()  \
+    .column("family1", "col1", "col_value1")  \
+    .column("family1", "col2", "col_value2", 1234567890l)  \
+    .column("family2", null, "fam_value1")  \
+    .now()
+
+HBase.session(session).table(tableName).row("row_id_2").store()  \
+    .column("family1", "row2_col1", "row2_col_value1")  \
+    .now()
+
+println "Done"
+
+println "Querying row by id..."
+
+println HBase.session(session).table(tableName).row("row_id_1")  \
+    .query()  \
+    .now().string
+
+println "Querying all rows..."
+
+println HBase.session(session).table(tableName).row().query().now().string
+
+println "Querying row by id with extended settings..."
+
+println HBase.session(session).table(tableName).row().query()  \
+    .column("family1", "row2_col1")  \
+    .column("family2")  \
+    .times(0, Long.MAX_VALUE)  \
+    .numVersions(1)  \
+    .now().string
+
+println "Deleting cell..."
+
+HBase.session(session).table(tableName).row("row_id_1")  \
+    .delete()  \
+    .column("family1", "col1")  \
+    .now()
+
+println "Rows after delete:"
+
+println HBase.session(session).table(tableName).row().query().now().string
+
+println "Extended cell delete"
+
+HBase.session(session).table(tableName).row("row_id_1")  \
+    .delete()  \
+    .column("family2")  \
+    .time(Long.MAX_VALUE)  \
+    .now()
+
+println "Rows after delete:"
+
+println HBase.session(session).table(tableName).row().query().now().string
+
+println "Table regions : " + HBase.session(session).table(tableName)  \
+    .regions()  \
+    .now().string
+
+println "Creating scanner..."
+
+scannerId = HBase.session(session).table(tableName).scanner().create()  \
+    .column("family1", "col2")  \
+    .column("family2")  \
+    .startRow("row_id_1")  \
+    .endRow("row_id_2")  \
+    .batch(1)  \
+    .startTime(0)  \
+    .endTime(Long.MAX_VALUE)  \
+    .filter("")  \
+    .maxVersions(100)  \
+    .now().scannerId
+
+println "Scanner id=" + scannerId
+
+println "Scanner get next..."
+
+println HBase.session(session).table(tableName).scanner(scannerId)  \
+    .getNext()  \
+    .now().string
+
+println "Dropping scanner with id=" + scannerId
+
+HBase.session(session).table(tableName).scanner(scannerId).delete().now()
+
+println "Done"
+
+println "Dropping table '" + tableName + "'..."
+
+HBase.session(session).table(tableName).delete().now()
+
+println "Done"
+
+session.shutdown(10, SECONDS)

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleOozieWorkflow.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleOozieWorkflow.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleOozieWorkflow.groovy
new file mode 100644
index 0000000..b6e7e72
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleOozieWorkflow.groovy
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.workflow.Workflow
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+inputFile = "LICENSE"
+jarFile = "samples/hadoop-examples.jar"
+
+gateway = "https://localhost:8443/gateway/sandbox"
+tableName = "test_table"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+jobDir = "/user/" + username + "/test"
+
+definition = """\
+<workflow-app xmlns="uri:oozie:workflow:0.2" name="wordcount-workflow">
+    <start to="root-node"/>
+    <action name="root-node">
+        <java>
+            <job-tracker>\${jobTracker}</job-tracker>
+            <name-node>\${nameNode}</name-node>
+            <main-class>org.apache.hadoop.examples.WordCount</main-class>
+            <arg>\${inputDir}</arg>
+            <arg>\${outputDir}</arg>
+        </java>
+        <ok to="end"/>
+        <error to="fail"/>
+    </action>
+    <kill name="fail">
+        <message>Java failed, error message[\${wf:errorMessage(wf:lastErrorNode())}]</message>
+    </kill>
+    <end name="end"/>
+</workflow-app>
+"""
+
+configuration = """\
+<configuration>
+    <property>
+        <name>user.name</name>
+        <value>default</value>
+    </property>
+    <property>
+        <name>nameNode</name>
+        <value>default</value>
+    </property>
+    <property>
+        <name>jobTracker</name>
+        <value>default</value>
+    </property>
+    <property>
+        <name>inputDir</name>
+        <value>$jobDir/input</value>
+    </property>
+    <property>
+        <name>outputDir</name>
+        <value>$jobDir/output</value>
+    </property>
+    <property>
+        <name>oozie.wf.application.path</name>
+        <value>$jobDir</value>
+    </property>
+</configuration>
+"""
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Mkdir " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
+
+putData = Hdfs.put(session).file( inputFile ).to( jobDir + "/input/FILE" ).later() {
+  println "Put " + jobDir + "/input/FILE: " + it.statusCode }
+
+putJar = Hdfs.put(session).file( jarFile ).to( jobDir + "/lib/hadoop-examples.jar" ).later() {
+  println "Put " + jobDir + "/lib/hadoop-examples.jar: " + it.statusCode }
+
+putWorkflow = Hdfs.put(session).text( definition ).to( jobDir + "/workflow.xml" ).later() {
+  println "Put " + jobDir + "/workflow.xml: " + it.statusCode }
+
+session.waitFor( putWorkflow, putData, putJar )
+
+jobId = Workflow.submit(session).text( configuration ).now().jobId
+println "Submitted job: " + jobId
+
+println "Polling up to 60s for job completion..."
+status = "RUNNING";
+count = 0;
+while( status == "RUNNING" && count++ < 60 ) {
+  sleep( 1000 )
+  json = Workflow.status(session).jobId( jobId ).now().string
+  status = JsonPath.read( json, "\$.status" )
+  print "."; System.out.flush();
+}
+println ""
+println "Job status: " + status
+
+if( status == "SUCCEEDED" ) {
+  text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
+  json = (new JsonSlurper()).parseText( text )
+  println json.FileStatuses.FileStatus.pathSuffix
+}
+
+println "Session closed: " + session.shutdown( 10, SECONDS )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJob.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJob.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJob.groovy
new file mode 100644
index 0000000..b7477b9
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJob.groovy
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+dataFile = "LICENSE"
+jarFile = "samples/hadoop-examples.jar"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+jobDir = "/user/" + username + "/test"
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
+
+putData = Hdfs.put( session ).file( dataFile ).to( jobDir + "/input/" + dataFile ).later() {
+  println "Put " + jobDir + "/input/" + dataFile + ": " + it.statusCode }
+
+putJar = Hdfs.put( session ).file( jarFile ).to( jobDir + "/lib/hadoop-examples.jar" ).later() {
+  println "Put " + jobDir + "/lib/hadoop-examples.jar: " + it.statusCode }
+
+session.waitFor( putData, putJar )
+
+jobId = Job.submitJava(session) \
+  .jar( jobDir + "/lib/hadoop-examples.jar" ) \
+  .app( "org.apache.hadoop.examples.WordCount" ) \
+  .input( jobDir + "/input" ) \
+  .output( jobDir + "/output" ) \
+  .now().jobId
+println "Submitted job: " + jobId
+
+println "Polling up to 60s for job completion..."
+done = false
+count = 0
+while( !done && count++ < 60 ) {
+  sleep( 1000 )
+  json = Job.queryStatus(session).jobId(jobId).now().string
+  done = JsonPath.read( json, "\$.status.jobComplete" )
+  print "."; System.out.flush();
+}
+println ""
+println "Job status: " + done
+
+text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+
+println "Session closed: " + session.shutdown( 10, SECONDS )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJobTeragen.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJobTeragen.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJobTeragen.groovy
new file mode 100644
index 0000000..3b111a9
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatJobTeragen.groovy
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+
+// You will need to copy hadoop-mapreduce-examples.jar from your cluster
+// and place it under the samples/ directory.
+// For example, you might find the jar under: /usr/iop/current/hadoop-mapreduce-client
+jarFile = "samples/hadoop-mapreduce-examples.jar"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+jobDir = "/user/" + username + "/test"
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
+
+putJar = Hdfs.put( session ).file( jarFile ).to( jobDir + "/lib/hadoop-mapreduce-examples.jar" ).later() {
+  println "Put " + jobDir + "/lib/hadoop-mapreduce-examples.jar: " + it.statusCode }
+
+session.waitFor( putJar )
+
+// Run teragen with 5 mappers. It will generate 500 records of 100 bytes each.
+jobId = Job.submitJava(session) \
+  .jar( jobDir + "/lib/hadoop-mapreduce-examples.jar" ) \
+  .app( "teragen" ) \
+  .arg( "-D").arg("mapred.map.tasks=5") \
+  .arg( "500" ) \
+  .input( jobDir + "/input_terasort" ) \
+  .now().jobId
+println "Submitted job: " + jobId
+
+println "Polling up to 60s for job completion..."
+done = false
+count = 0
+while( !done && count++ < 90 ) {
+  sleep( 1000 )
+  json = Job.queryStatus(session).jobId(jobId).now().string
+  done = JsonPath.read( json, "\$.status.jobComplete" )
+  print "."; System.out.flush();
+}
+println ""
+println "Job status: " + done
+
+text = Hdfs.ls( session ).dir( jobDir + "/input_terasort" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+
+println "Session closed: " + session.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatPig.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatPig.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatPig.groovy
new file mode 100644
index 0000000..8699608
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatPig.groovy
@@ -0,0 +1,84 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+jobDir = "/user/" + username + "/test"
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
+
+id_pig = '''
+A = load 'test/input/$filename' using PigStorage(':');
+B = foreach A generate $0 as id;
+dump B;
+'''
+
+fake_passwd = '''ctdean:Chris Dean:secret
+pauls:Paul Stolorz:good
+carmas:Carlos Armas:evil
+dra:Deirdre McClure:marvelous
+'''
+
+Hdfs.put(session).text( id_pig ).to( jobDir + "/input/id.pig" ).now()
+Hdfs.put(session).text( fake_passwd ).to( jobDir + "/input/passwd" ).now()
+
+jobId = Job.submitPig(session) \
+            .file("${jobDir}/input/id.pig") \
+            .arg("-v") \
+            .arg("-p").arg("filename=passwd") \
+            .statusDir("${jobDir}/output") \
+            .now().jobId
+
+println "Submitted job: " + jobId
+
+println "Polling up to 60s for job completion..."
+done = false
+count = 0
+while( !done && count++ < 60 ) {
+  sleep( 1000 )
+  json = Job.queryStatus(session).jobId(jobId).now().string
+  done = JsonPath.read( json, "\$.status.jobComplete" )
+  print "."; System.out.flush();
+}
+println ""
+println "Job status: " + done
+
+text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+
+println "Session closed: " + session.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatQueue.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatQueue.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatQueue.groovy
new file mode 100644
index 0000000..2517db2
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatQueue.groovy
@@ -0,0 +1,38 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Queue: " + Job.queryQueue( session ).now().string
+
+println "Session closed: " + session.shutdown( 10, SECONDS )
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatSqoop.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatSqoop.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatSqoop.groovy
new file mode 100644
index 0000000..a5629e3
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHCatSqoop.groovy
@@ -0,0 +1,80 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.job.Job
+
+import static java.util.concurrent.TimeUnit.SECONDS
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+jobDir = "/user/" + username + "/test"
+
+session = Hadoop.login( gateway, username, pass )
+
+println "Delete " + jobDir + ": " + Hdfs.rm( session ).file( jobDir ).recursive().now().statusCode
+println "Create " + jobDir + ": " + Hdfs.mkdir( session ).dir( jobDir ).now().statusCode
+
+// Define the sqoop options value for the sqoop command.
+// This uses the publicly available Genome MySQL database.
+// If the database is unavailable, set up an alternate database and update the
+// db information below.
+db = [ driver:"com.mysql.jdbc.Driver", url:"jdbc:mysql://genome-mysql.cse.ucsc.edu/hg38", user:"genome", password:"", name:"hg38", table:"scBlastTab", split:"query" ]
+
+targetdir = jobDir + "/" + db.table
+
+sqoop_command = "import --driver ${db.driver} --connect ${db.url} --username ${db.user} --password ${db.password} --table ${db.table} --split-by ${db.split} --target-dir ${targetdir}"
+
+jobId = Job.submitSqoop(session) \
+            .command(sqoop_command) \
+            .statusDir("${jobDir}/output") \
+            .now().jobId
+
+println "Submitted job: " + jobId
+
+println "Polling up to 60s for job completion..."
+done = false
+count = 0
+while( !done && count++ < 180 ) {
+  sleep( 1000 )
+  json = Job.queryStatus(session).jobId(jobId).now().string
+  done = JsonPath.read( json, "\$.status.jobComplete" )
+  print "."; System.out.flush();
+}
+println ""
+println "Job status: " + done
+
+text = Hdfs.ls( session ).dir( jobDir + "/output" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+
+println "Content of stderr:"
+println Hdfs.get( session ).from( jobDir + "/output/stderr" ).now().string
+
+println "Session closed: " + session.shutdown( 10, SECONDS )

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsLs.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsLs.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsLs.groovy
new file mode 100644
index 0000000..d631f33
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsLs.groovy
@@ -0,0 +1,40 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import groovy.json.JsonSlurper
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+session = Hadoop.login( gateway, username, pass )
+
+text = Hdfs.ls( session ).dir( "/" ).now().string
+json = (new JsonSlurper()).parseText( text )
+println json.FileStatuses.FileStatus.pathSuffix
+session.shutdown()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsPutGet.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsPutGet.groovy b/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsPutGet.groovy
new file mode 100644
index 0000000..68b5f67
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/ExampleWebHdfsPutGet.groovy
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.hdfs.Hdfs
+import org.apache.hadoop.gateway.shell.Credentials
+
+gateway = "https://localhost:8443/gateway/sandbox"
+dataFile = "README"
+
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+dataDir = "/user/" + username + "/example"
+
+session = Hadoop.login( gateway, username, pass )
+
+Hdfs.rm( session ).file( dataDir ).recursive().now()
+
+Hdfs.put( session ).file( dataFile ).to( dataDir + "/" + dataFile ).now()
+
+Hdfs.put( session ).file( dataFile ).to( dataDir + "/" + dataFile ).overwrite( true ).permission( 777 ).now()
+
+println Hdfs.get( session ).from( dataDir + "/" + dataFile ).now().string
+
+session.shutdown()
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/README
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/README b/gateway-shell-samples/src/main/resources/samples/README
new file mode 100644
index 0000000..3e59836
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/README
@@ -0,0 +1,13 @@
+*********************************************************************
+Samples README
+*********************************************************************
+The purpose of the samples within this directory is to demonstrate the capabilities
+of the Apache Knox Gateway to provide access to the numerous APIs that are available
+from the service components of a Hadoop cluster.
+
+Depending on exactly how your Knox installation was done, there will be some number of
+steps required in order to fully install and configure the samples for use in your environment.
+
+Please see the following section within the Apache Knox Users Guide: 
+
+http://knox.apache.org/books/knox-0-5-0/knox-0-5-0.html#Gateway+Samples
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/SampleComplexCommand.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/SampleComplexCommand.groovy b/gateway-shell-samples/src/main/resources/samples/SampleComplexCommand.groovy
new file mode 100644
index 0000000..1abfa4a
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/SampleComplexCommand.groovy
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import com.jayway.jsonpath.JsonPath
+import org.apache.hadoop.gateway.shell.AbstractRequest
+import org.apache.hadoop.gateway.shell.BasicResponse
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.http.HttpResponse
+import org.apache.http.client.methods.HttpGet
+import org.apache.http.client.utils.URIBuilder
+
+import java.util.concurrent.Callable
+
+class SampleComplexCommand {
+
+  // Sample of a "complex" KnoxShell command: the Request issues a WebHDFS
+  // LISTSTATUS call and the custom Response post-processes the JSON payload.
+  static class Request extends AbstractRequest<Response> {
+
+    Request( Hadoop hadoop ) {
+      super( hadoop )
+    }
+
+    // Path (relative to SampleService.PATH) to list; set via fluent builder.
+    private String param;
+    Request param( String param ) {
+      this.param = param;
+      return this;
+    }
+
+    @Override
+    protected Callable<Response> callable() {
+      // The framework executes this callable asynchronously via now()/later().
+      return new Callable<Response>() {
+        @Override
+        Response call() {
+          URIBuilder uri = uri( SampleService.PATH, param )
+          addQueryParam( uri, "op", "LISTSTATUS" )
+          HttpGet get = new HttpGet( uri.build() )
+          return new Response( execute( get ) )
+        }
+      }
+    }
+
+  }
+
+  static class Response extends BasicResponse {
+
+    Response(HttpResponse response) {
+      super(response)
+    }
+
+    // Extracts the file/directory names from the LISTSTATUS JSON response.
+    public List<String> getNames() {
+      return JsonPath.read( string, "\$.FileStatuses.FileStatus[*].pathSuffix" )
+    }
+
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/SampleScript.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/SampleScript.groovy b/gateway-shell-samples/src/main/resources/samples/SampleScript.groovy
new file mode 100644
index 0000000..219ea85
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/SampleScript.groovy
@@ -0,0 +1,35 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.hadoop.gateway.shell.Credentials
+
+// Driver script for the custom SampleService commands: logs in through the
+// gateway and runs both the simple and the complex sample command.
+gateway = "https://localhost:8443/gateway/sandbox"
+
+// Interactively collect the username (echoed) and password (hidden input).
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+username = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+session = Hadoop.login( gateway, username, pass )
+
+// Simple command: prints the raw LISTSTATUS JSON for /tmp.
+println "JSON=" + SampleService.simple( session ).param( "/tmp" ).now().string
+
+// Complex command: prints just the entry names parsed from the same JSON.
+println "Names=" + SampleService.complex( session ).param( "/tmp" ).now().names
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/SampleService.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/SampleService.groovy b/gateway-shell-samples/src/main/resources/samples/SampleService.groovy
new file mode 100644
index 0000000..3b1345c
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/SampleService.groovy
@@ -0,0 +1,32 @@
+/*
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+import org.apache.hadoop.gateway.shell.Hadoop
+
+// Facade exposing the sample commands in the fluent style used by the
+// built-in KnoxShell services (e.g. Hdfs).
+class SampleService {
+
+  // Base path of the WebHDFS REST API proxied by the gateway.
+  static String PATH = "/webhdfs/v1"
+
+  // Returns the simple command, whose result is a raw BasicResponse.
+  static SampleSimpleCommand simple( Hadoop hadoop ) {
+    return new SampleSimpleCommand( hadoop )
+  }
+
+  // Returns the complex command's request; its Response can parse entry names.
+  static SampleComplexCommand.Request complex( Hadoop hadoop ) {
+    return new SampleComplexCommand.Request( hadoop )
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/SampleSimpleCommand.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/SampleSimpleCommand.groovy b/gateway-shell-samples/src/main/resources/samples/SampleSimpleCommand.groovy
new file mode 100644
index 0000000..600113d
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/SampleSimpleCommand.groovy
@@ -0,0 +1,51 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import org.apache.hadoop.gateway.shell.AbstractRequest
+import org.apache.hadoop.gateway.shell.BasicResponse
+import org.apache.hadoop.gateway.shell.Hadoop
+import org.apache.http.client.methods.HttpGet
+import org.apache.http.client.utils.URIBuilder
+
+import java.util.concurrent.Callable
+
+// Sample of a "simple" KnoxShell command: issues a WebHDFS LISTSTATUS call
+// and returns the unprocessed BasicResponse.
+class SampleSimpleCommand extends AbstractRequest<BasicResponse> {
+
+  SampleSimpleCommand( Hadoop hadoop ) {
+    super( hadoop )
+  }
+
+  // Path (relative to SampleService.PATH) to list; set via fluent builder.
+  private String param
+  SampleSimpleCommand param( String param ) {
+    this.param = param
+    return this
+  }
+
+  @Override
+  protected Callable<BasicResponse> callable() {
+    // The framework executes this callable asynchronously via now()/later().
+    return new Callable<BasicResponse>() {
+      @Override
+      BasicResponse call() {
+        URIBuilder uri = uri( SampleService.PATH, param )
+        addQueryParam( uri, "op", "LISTSTATUS" )
+        HttpGet get = new HttpGet( uri.build() )
+        return new BasicResponse( execute( get ) )
+      }
+    }
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy
new file mode 100644
index 0000000..0e465aa
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/HiveJDBCSample.groovy
@@ -0,0 +1,64 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.sql.DriverManager
+import org.apache.hadoop.gateway.shell.Credentials
+
+// Sample that connects to HiveServer2 through the Knox gateway over HTTPS,
+// using the Hive JDBC driver in HTTP transport mode.
+gatewayHost = "localhost";
+gatewayPort = 8443;
+trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
+trustStorePassword = "knoxsecret";
+contextPath = "gateway/sandbox-with-knox-inside/hive";
+connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
+
+// Interactively collect the username (echoed) and password (hidden input).
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+user = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+// Load Hive JDBC Driver
+Class.forName( "org.apache.hive.jdbc.HiveDriver" );
+
+// Configure JDBC connection
+connection = DriverManager.getConnection( connectionString, user, pass );
+
+statement = connection.createStatement();
+
+// Disable Hive authorization - This can be omitted if Hive authorization is configured properly
+statement.execute( "set hive.security.authorization.enabled=false" );
+
+// Drop sample table to ensure repeatability
+statement.execute( "DROP TABLE logs" );
+
+// Create sample table
+statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
+
+// Load data into Hive from file /tmp/sample.log which is placed on the local file system
+statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
+
+resultSet = statement.executeQuery( "SELECT * FROM logs" );
+
+// Print the first two columns of every loaded row.
+while ( resultSet.next() ) {
+  System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
+}
+
+resultSet.close();
+statement.close();
+connection.close();

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README
new file mode 100644
index 0000000..8ff61e4
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox-with-knox-inside/README
@@ -0,0 +1,2 @@
+This sample assumes that Knox is deployed on Sandbox.
+Use sandbox-with-knox-inside.xml as deployment configuration.

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy
new file mode 100644
index 0000000..bd34edb
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/HiveJDBCSample.groovy
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import java.sql.DriverManager
+import org.apache.hadoop.gateway.shell.Credentials
+
+// Sample that connects to HiveServer2 through the Knox gateway over HTTPS,
+// using the Hive JDBC driver in HTTP transport mode.
+gatewayHost = "localhost";
+gatewayPort = 8443;
+trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
+trustStorePassword = "knoxsecret";
+contextPath = "gateway/sandbox/hive";
+connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
+
+// Interactively collect the username (echoed) and password (hidden input).
+credentials = new Credentials()
+credentials.add("ClearInput", "Enter username: ", "user")
+                .add("HiddenInput", "Enter pas" + "sword: ", "pass")
+credentials.collect()
+
+user = credentials.get("user").string()
+pass = credentials.get("pass").string()
+
+// Load Hive JDBC Driver
+Class.forName( "org.apache.hive.jdbc.HiveDriver" );
+
+// Configure JDBC connection
+connection = DriverManager.getConnection( connectionString, user, pass );
+
+statement = connection.createStatement();
+
+// Disable Hive authorization - This can be omitted if Hive authorization is configured properly
+statement.execute( "set hive.security.authorization.enabled=false" );
+
+// Drop sample table to ensure repeatability
+statement.execute( "DROP TABLE logs" );
+
+// Create sample table
+statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
+
+// Load data into Hive from file /tmp/sample.log which is placed on the local file system
+statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
+
+resultSet = statement.executeQuery( "SELECT * FROM logs" );
+
+// Print the first two columns of every loaded row.
+while ( resultSet.next() ) {
+  System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
+}
+
+resultSet.close();
+statement.close();
+connection.close();

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/README
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/README b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/README
new file mode 100644
index 0000000..f31ab55
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/hive/groovy/jdbc/sandbox/README
@@ -0,0 +1 @@
+This sample assumes that Sandbox is running on the host where Knox is deployed.

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java b/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java
new file mode 100644
index 0000000..2bcf4d0
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/HiveJDBCSample.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.hadoop.gateway.shell.Credentials;
+
+public class HiveJDBCSample {
+
+  public static void main( String[] args ) {
+    Connection connection = null;
+    Statement statement = null;
+    ResultSet resultSet = null;
+
+    try {
+      String gatewayHost = "localhost";
+      int gatewayPort = 8443;
+      String trustStore = "/usr/lib/knox/data/security/keystores/gateway.jks";
+      String trustStorePassword = "knoxsecret";
+      String contextPath = "gateway/sandbox-with-knox-inside/hive";
+      String connectionString = String.format( "jdbc:hive2://%s:%d/;ssl=true;sslTrustStore=%s;trustStorePassword=%s?hive.server2.transport.mode=http;hive.server2.thrift.http.path=/%s", gatewayHost, gatewayPort, trustStore, trustStorePassword, contextPath );
+
+      Credentials credentials = new Credentials();
+      credentials.add("ClearInput", "Enter username: ", "user");
+                     .add("HiddenInput", "Enter pas" + "sword: ", "pass");
+      credentials.collect();
+
+      String username = credentials.get("user").string();
+      String pass = credentials.get("pass").string();
+
+      // Load Hive JDBC Driver
+      Class.forName( "org.apache.hive.jdbc.HiveDriver" );
+
+      // Configure JDBC connection
+      connection = DriverManager.getConnection( connectionString, user, password );
+
+      statement = connection.createStatement();
+
+      // Disable Hive authorization - This can be ommited if Hive authorization is configured properly
+      statement.execute( "set hive.security.authorization.enabled=false" );
+
+      // Drop sample table to ensure repeatability
+      statement.execute( "DROP TABLE logs" );
+
+      // Create sample table
+      statement.execute( "CREATE TABLE logs(column1 string, column2 string, column3 string, column4 string, column5 string, column6 string, column7 string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ' '" );
+
+      // Load data into Hive from file /tmp/log.txt which is placed on the local file system
+      statement.execute( "LOAD DATA LOCAL INPATH '/tmp/sample.log' OVERWRITE INTO TABLE logs" );
+
+      resultSet = statement.executeQuery( "SELECT * FROM logs" );
+
+      while ( resultSet.next() ) {
+        System.out.println( resultSet.getString( 1 ) + " --- " + resultSet.getString( 2 ) );
+      }
+    } catch ( ClassNotFoundException ex ) {
+      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+    } catch ( SQLException ex ) {
+      Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+    } finally {
+      if ( resultSet != null ) {
+        try {
+          resultSet.close();
+        } catch ( SQLException ex ) {
+          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+        }
+      }
+      if ( statement != null ) {
+        try {
+          statement.close();
+        } catch ( SQLException ex ) {
+          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+        }
+      }
+      if ( connection != null ) {
+        try {
+          connection.close();
+        } catch ( SQLException ex ) {
+          Logger.getLogger( HiveJDBCSample.class.getName() ).log( Level.SEVERE, null, ex );
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/knox/blob/761dd5d4/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/README
----------------------------------------------------------------------
diff --git a/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/README b/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/README
new file mode 100644
index 0000000..8ff61e4
--- /dev/null
+++ b/gateway-shell-samples/src/main/resources/samples/hive/java/jdbc/sandbox-with-knox-inside/README
@@ -0,0 +1,2 @@
+This sample assumes that Knox is deployed on Sandbox.
+Use sandbox-with-knox-inside.xml as deployment configuration.