Posted to commits@bigtop.apache.org by of...@apache.org on 2018/04/28 12:52:37 UTC

[2/2] bigtop git commit: BIGTOP-3021: Remove HUE from Bigtop

BIGTOP-3021: Remove HUE from Bigtop


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/23582dc0
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/23582dc0
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/23582dc0

Branch: refs/heads/master
Commit: 23582dc063f13b29066cb64e6eaa5d134094b8e4
Parents: 5031b66
Author: Olaf Flebbe <of...@apache.org>
Authored: Tue Apr 10 20:51:04 2018 +0200
Committer: Olaf Flebbe <of...@apache.org>
Committed: Sat Apr 28 14:52:16 2018 +0200

----------------------------------------------------------------------
 MAINTAINERS.txt                                 |   1 -
 bigtop-deploy/puppet/README.md                  |   4 +-
 .../puppet/hieradata/bigtop/cluster.yaml        |  19 +-
 bigtop-deploy/puppet/hieradata/site.yaml        |   1 -
 bigtop-deploy/puppet/manifests/cluster.pp       |   4 -
 .../puppet/modules/hadoop/manifests/init.pp     |   7 +-
 .../modules/hadoop/templates/hdfs-site.xml      |  19 -
 .../hadoop_oozie/templates/oozie-site.xml       |  36 -
 .../puppet/modules/hue/manifests/init.pp        |  82 --
 .../puppet/modules/hue/templates/hue.ini        | 956 -------------------
 bigtop-deploy/puppet/modules/hue/tests/init.pp  |  24 -
 .../src/common/hadoop/install_hadoop.sh         |  10 -
 .../src/common/hue/do-component-build           |  39 -
 bigtop-packages/src/common/hue/install_hue.sh   | 204 ----
 bigtop-packages/src/deb/hue/changelog           |   1 -
 bigtop-packages/src/deb/hue/compat              |   1 -
 bigtop-packages/src/deb/hue/control             | 136 ---
 bigtop-packages/src/deb/hue/copyright           | 202 ----
 bigtop-packages/src/deb/hue/docs                |   1 -
 .../src/deb/hue/hue-app.postinst.tpl            |  59 --
 bigtop-packages/src/deb/hue/hue-app.prerm.tpl   |  63 --
 bigtop-packages/src/deb/hue/hue-beeswax.install |   1 -
 bigtop-packages/src/deb/hue/hue-common.install  |  29 -
 bigtop-packages/src/deb/hue/hue-common.postinst |  69 --
 bigtop-packages/src/deb/hue/hue-common.postrm   |  56 --
 bigtop-packages/src/deb/hue/hue-common.preinst  |  73 --
 bigtop-packages/src/deb/hue/hue-common.prerm    |  61 --
 bigtop-packages/src/deb/hue/hue-doc.install     |   1 -
 bigtop-packages/src/deb/hue/hue-hbase.install   |   1 -
 bigtop-packages/src/deb/hue/hue-impala.install  |   1 -
 bigtop-packages/src/deb/hue/hue-pig.install     |   1 -
 bigtop-packages/src/deb/hue/hue-rdbms.install   |   1 -
 bigtop-packages/src/deb/hue/hue-search.install  |   1 -
 .../src/deb/hue/hue-security.install            |   1 -
 bigtop-packages/src/deb/hue/hue-server.hue.init | 258 -----
 bigtop-packages/src/deb/hue/hue-server.prerm    |  49 -
 bigtop-packages/src/deb/hue/hue-spark.install   |   1 -
 bigtop-packages/src/deb/hue/hue-sqoop.install   |   1 -
 .../src/deb/hue/hue-zookeeper.install           |   1 -
 bigtop-packages/src/deb/hue/rules               |  52 -
 bigtop-packages/src/deb/hue/source/format       |   1 -
 .../src/deb/hue/source/include-binaries         |   1 -
 bigtop-packages/src/rpm/hue/BUILD/.gitignore    |   0
 bigtop-packages/src/rpm/hue/RPMS/.gitignore     |   0
 bigtop-packages/src/rpm/hue/SOURCES/.gitignore  |   0
 bigtop-packages/src/rpm/hue/SOURCES/hue.init    | 139 ---
 .../src/rpm/hue/SOURCES/hue.init.suse           | 181 ----
 bigtop-packages/src/rpm/hue/SPECS/hue.spec      | 555 -----------
 bigtop-packages/src/rpm/hue/SRPMS/.gitignore    |   0
 49 files changed, 6 insertions(+), 3398 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/MAINTAINERS.txt
----------------------------------------------------------------------
diff --git a/MAINTAINERS.txt b/MAINTAINERS.txt
index 2a61ead..93cf97a 100644
--- a/MAINTAINERS.txt
+++ b/MAINTAINERS.txt
@@ -11,7 +11,6 @@ hadoop: mark grover, cos, rvs
 hama: minho kim <mi...@apache.org>, edward j. yoon
 hbase: andrew purtell, rvs
 hive: mark grover, youngwoo kim
-hue: oflebbe
 itest: cos, rvs
 mvn publishing/packaging: rvs
 oozie evans ye, rvs

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/README.md
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/README.md b/bigtop-deploy/puppet/README.md
index 201e172..9f5425c 100644
--- a/bigtop-deploy/puppet/README.md
+++ b/bigtop-deploy/puppet/README.md
@@ -85,8 +85,8 @@ No changes are required to the default puppet 3 auth.conf.
 
 For other options that may be set here, look for class parameters in the modules'
 manifests/init.pp files. Any class parameter can be used as a hiera key if prefixed with the
-module and class namespace. Module hue's server class will look for its parameter rm_host as
-`hue::server::rm_host` in hiera.
+module and class namespace. Module kafka's server class will look for its parameter `port` as
+`kafka::server::port` in hiera.
 Note that if `hadoop::hadoop_storage_dirs` is left unset, puppet will attempt to guess which
 directories to use.
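
For context on the hieradata convention the updated README text describes: any class parameter can be overridden with a key of the form module::class::parameter. A minimal site.yaml sketch, assuming Kafka's server class exposes a `port` parameter as the README example states (the value shown is illustrative only, not necessarily the Bigtop default):

    # hieradata/site.yaml -- override a class parameter via its namespaced key
    kafka::server::port: "9092"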
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml b/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
index cc87ac1..e9f3ff8 100644
--- a/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
+++ b/bigtop-deploy/puppet/hieradata/bigtop/cluster.yaml
@@ -17,7 +17,7 @@
 # be installed as usual. Otherwise only a specified list will be set
 # Possible elements:
 # hadoop,yarn,hbase,alluxio,flink,flume,solrcloud,spark,oozie,hcat,sqoop,sqoop2,httpfs,
-# hue,mahout,giraph,crunch,pig,hive,zookeeper,ycsb,qfs
+# mahout,giraph,crunch,pig,hive,zookeeper,ycsb,qfs
 # Example (to deploy only HDFS and YARN server and gateway parts)
 # This can be a comma-separated list or an array.
 #hadoop_cluster_node::cluster_components:
@@ -110,7 +110,6 @@ hadoop::common_hdfs::hadoop_namenode_port: "8020"
 
 hadoop::common_yarn::hadoop_ps_host: "%{hiera('bigtop::hadoop_head_node')}"
 hadoop::common_yarn::hadoop_rm_host: "%{hiera('bigtop::hadoop_head_node')}"
-# actually default but needed for hue::server::rm_port here
 hadoop::common_yarn::hadoop_rm_port: "8032"
 
 hadoop::common_mapred_app::jobtracker_host: "%{hiera('bigtop::hadoop_head_node')}"
@@ -120,7 +119,6 @@ bigtop::hadoop_history_server_port: "19888"
 bigtop::hadoop_history_server_url: "http://%{hiera('hadoop::common_mapred_app::mapreduce_jobhistory_host')}:%{hiera('bigtop::hadoop_history_server_port')}"
 hadoop::common_yarn::yarn_log_server_url: "%{hiera('bigtop::hadoop_history_server_url')}/jobhistory/logs"
 
-# actually default but needed for hue::server::webhdfs_url here
 hadoop::httpfs::hadoop_httpfs_port: "14000"
 
 bigtop::hadoop_zookeeper_port: "2181"
@@ -171,21 +169,6 @@ bigtop::sqoop2_server_port: "12000"
 bigtop::hbase_thrift_port: "9090"
 bigtop::hadoop_oozie_port: "11000"
 
-hue::server::rm_host: "%{hiera('hadoop::common_yarn::hadoop_rm_host')}"
-hue::server::rm_port: "%{hiera('hadoop::common_yarn::hadoop_rm_port')}"
-hue::server::rm_url: "http://%{hiera('bigtop::hadoop_head_node')}:%{hiera('bigtop::hadoop_rm_http_port')}"
-hue::server::rm_proxy_url: "http://%{hiera('hadoop::common_yarn::hadoop_ps_host')}:%{hiera('hadoop::common_yarn::hadoop_ps_port')}"
-hue::server::history_server_url: "%{hiera('bigtop::hadoop_history_server_url')}"
-# those use fqdn instead of hadoop_head_node because it's only ever activated
-# on the gatewaynode
-hue::server::webhdfs_url: "http://%{fqdn}:%{hiera('hadoop::httpfs::hadoop_httpfs_port')}/webhdfs/v1"
-hue::server::sqoop2_url: "http://%{fqdn}:%{hiera('bigtop::sqoop2_server_port')}/sqoop"
-hue::server::solr_url: "http://%{fqdn}:%{hiera('solr::server::port')}/solr/"
-hue::server::hbase_thrift_url: "%{fqdn}:%{hiera('bigtop::hbase_thrift_port')}"
-hue::server::oozie_url: "http://%{hiera('bigtop::hadoop_head_node')}:%{hiera('bigtop::hadoop_oozie_port')}/oozie"
-hue::server::default_fs: "%{hiera('bigtop::hadoop_namenode_uri')}"
-hue::server::kerberos_realm: "%{hiera('kerberos::site::realm')}"
-
 giraph::client::zookeeper_quorum: "%{hiera('bigtop::hadoop_head_node')}"
 
 hadoop_hive::common_config::hbase_zookeeper_quorum: "%{hiera('hadoop_hbase::common_config::zookeeper_quorum')}"
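
With hue dropped from the possible cluster_components, any existing site.yaml override that listed it should simply remove the entry. A minimal sketch of a post-removal component list, following the HDFS/YARN example in the comment above (component names other than the removed hue are illustrative):

    # hieradata/site.yaml -- hue is no longer a valid component
    hadoop_cluster_node::cluster_components:
      - hadoop
      - yarn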

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/hieradata/site.yaml
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/hieradata/site.yaml b/bigtop-deploy/puppet/hieradata/site.yaml
index d1e74a9..9c2dfc8 100644
--- a/bigtop-deploy/puppet/hieradata/site.yaml
+++ b/bigtop-deploy/puppet/hieradata/site.yaml
@@ -25,7 +25,6 @@ hadoop::hadoop_storage_dirs:
 #  - hcat
 #  - hive
 #  - httpfs
-#  - hue
 #  - mahout
 #  - mapred-app
 #  - oozie

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/manifests/cluster.pp
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/manifests/cluster.pp b/bigtop-deploy/puppet/manifests/cluster.pp
index 7579606..8394f8f 100644
--- a/bigtop-deploy/puppet/manifests/cluster.pp
+++ b/bigtop-deploy/puppet/manifests/cluster.pp
@@ -106,9 +106,6 @@ $roles_map = {
   httpfs => {
     gateway_server => ["httpfs-server"],
   },
-  hue => {
-    gateway_server => ["hue-server"],
-  },
   mahout => {
     client => ["mahout-client"],
   },
@@ -214,7 +211,6 @@ class node_with_roles ($roles = hiera("bigtop::roles")) inherits hadoop_cluster_
     "sqoop2",
     "hadoop_zookeeper",
     "hcatalog",
-    "hue",
     "mahout",
     "solr",
     "spark",

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp b/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
index 7d8e48f..9d83924 100644
--- a/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
+++ b/bigtop-deploy/puppet/modules/hadoop/manifests/init.pp
@@ -19,10 +19,9 @@ class hadoop ($hadoop_security_authentication = "simple",
   # Set from facter if available
   $hadoop_storage_dirs = split($::hadoop_storage_dirs, ";"),
   $proxyusers = {
-    oozie => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,hue,users', hosts => "*" },
-     hive => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,hue,users', hosts => "*" },
-      hue => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,hue,users', hosts => "*" },
-   httpfs => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,hue,users', hosts => "*" } },
+    oozie => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,users', hosts => "*" },
+     hive => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,users', hosts => "*" },
+   httpfs => { groups => 'hudson,testuser,root,hadoop,jenkins,oozie,hive,httpfs,users', hosts => "*" } },
   $generate_secrets = false,
 ) {
 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml b/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml
index 9f98929..3d10c27 100644
--- a/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml
+++ b/bigtop-deploy/puppet/modules/hadoop/templates/hdfs-site.xml
@@ -279,25 +279,6 @@
     <description>The name of the group of super-users.</description>
   </property>
 
-  <!-- Enable Hue plugins -->
-<% if @hadoop_dfs_namenode_plugins -%>
-  <property>
-    <name>dfs.namenode.plugins</name>
-    <value><%= @hadoop_dfs_namenode_plugins %></value>
-    <description>Comma-separated list of namenode plug-ins to be activated.
-    </description>
-  </property>
-
-<% end -%>
-<% if @hadoop_dfs_datanode_plugins -%>
-  <property>
-    <name>dfs.datanode.plugins</name>
-    <value><%= @hadoop_dfs_datanode_plugins %></value>
-    <description>Comma-separated list of datanode plug-ins to be activated.
-    </description>
-  </property>
-
-<% end -%>
   <!-- increase the number of datanode transceivers way above the default of 256
      - this is for hbase -->
   <property>

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/modules/hadoop_oozie/templates/oozie-site.xml
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/hadoop_oozie/templates/oozie-site.xml b/bigtop-deploy/puppet/modules/hadoop_oozie/templates/oozie-site.xml
index 93d5eaf..674316d 100644
--- a/bigtop-deploy/puppet/modules/hadoop_oozie/templates/oozie-site.xml
+++ b/bigtop-deploy/puppet/modules/hadoop_oozie/templates/oozie-site.xml
@@ -330,40 +330,4 @@
             KerberosName for more details.
         </description>
     </property>
-
-    <!-- Proxyuser Configuration -->
-
-    <property>
-        <name>oozie.service.ProxyUserService.proxyuser.hue.hosts</name>
-        <value>*</value>
-        <description>
-            List of hosts the '#USER#' user is allowed to perform 'doAs'
-            operations.
-
-            The '#USER#' must be replaced with the username o the user who is
-            allowed to perform 'doAs' operations.
-
-            The value can be the '*' wildcard or a list of hostnames.
-
-            For multiple users copy this property and replace the user name
-            in the property name.
-        </description>
-    </property>
-
-    <property>
-        <name>oozie.service.ProxyUserService.proxyuser.hue.groups</name>
-        <value>*</value>
-        <description>
-            List of groups the '#USER#' user is allowed to impersonate users
-            from to perform 'doAs' operations.
-
-            The '#USER#' must be replaced with the username o the user who is
-            allowed to perform 'doAs' operations.
-
-            The value can be the '*' wildcard or a list of groups.
-
-            For multiple users copy this property and replace the user name
-            in the property name.
-        </description>
-    </property>
 </configuration>

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/modules/hue/manifests/init.pp
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/hue/manifests/init.pp b/bigtop-deploy/puppet/modules/hue/manifests/init.pp
deleted file mode 100644
index fa189a1..0000000
--- a/bigtop-deploy/puppet/modules/hue/manifests/init.pp
+++ /dev/null
@@ -1,82 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-class hue {
-  class deploy ($roles) {
-    if ("hue-server" in $roles) {
-      include hue::server
-      if ("httpfs-server" in $roles) {
-        Class['Hadoop::Httpfs'] -> Class['Hue::Server']
-      }
-      if ("hbase-client" in $roles) {
-        Class['Hadoop_hbase::Client'] -> Class['Hue::Server']
-      }
-    }
-  }
-
-  class server($sqoop2_url = "http://localhost:12000/sqoop", $solr_url = "http://localhost:8983/solr/", $hbase_thrift_url = "",
-               $webhdfs_url, $rm_host, $rm_port, $oozie_url, $rm_proxy_url, $history_server_url,
-               $hive_host = "", $hive_port = "10000",
-		$zookeeper_host_port = "localhost:2181",
-               $force_username_lowercase = "false",
-               $group_filter_value = "objectclass=groupOfEntries",
-               $nt_domain = undef,
-               $use_ldap_username_pattern = false,
-               $ldap_username_pattern = undef,
-               $remote_deployement_dir = "/user/hue/oozie/deployments",
-               $rm_logical_name = undef, $rm_api_port = "8088", $app_blacklist = "impala, security",
-               $hue_host = "0.0.0.0", $hue_port = "8888", $hue_timezone = "America/Los_Angeles",
-               $default_fs = "hdfs://localhost:8020",
-               $kerberos_realm = "", $kerberos_principal = "", $huecert = undef, $huekey = undef,
-               $auth_backend = "desktop.auth.backend.AllowFirstUserDjangoBackend",
-               $ldap_url = undef, $ldap_cert = undef, $use_start_tls = "true",
-               $base_dn = undef , $bind_dn = undef, $bind_password = undef,
-               $user_name_attr = undef, $user_filter = undef,
-               $group_member_attr = undef, $group_filter = undef,
-               $hue_apps = "all", $default_hdfs_superuser = "hdfs" ) {
-
-    $hue_packages = $hue_apps ? {
-      "all"     => [ "hue", "hue-server" ], # The hue metapackage requires all apps
-      "none"    => [ "hue-server" ],
-      default   => concat(prefix($hue_apps, "hue-"), [ "hue-server" ])
-    }
-
-    if ($kerberos_realm and $kerberos_realm != "") {
-      require kerberos::client
-      kerberos::host_keytab { "hue":
-        spnego => false,
-        require => Package["hue-server"],
-      }
-    }
-
-    package { $hue_packages:
-      ensure => latest,
-    }
-
-    file { "/etc/hue/conf/hue.ini":
-      content => template("hue/hue.ini"),
-      require => Package[$hue_packages],
-    }
-
-    service { "hue":
-      ensure => running,
-      require => [ Package[$hue_packages], File["/etc/hue/conf/hue.ini"]],
-      subscribe => [ Package[$hue_packages], File["/etc/hue/conf/hue.ini"]],
-      hasrestart => true,
-      hasstatus => true,
-    }
-    Kerberos::Host_keytab <| title == "hue" |> -> Service["hue"]
-  }
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/modules/hue/templates/hue.ini
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/hue/templates/hue.ini b/bigtop-deploy/puppet/modules/hue/templates/hue.ini
deleted file mode 100644
index 51c0316..0000000
--- a/bigtop-deploy/puppet/modules/hue/templates/hue.ini
+++ /dev/null
@@ -1,956 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Hue configuration file
-# ===================================
-#
-# For complete documentation about the contents of this file, run
-#   $ <hue_root>/build/env/bin/hue config_help
-#
-# All .ini files under the current directory are treated equally.  Their
-# contents are merged to form the Hue configuration, which can
-# can be viewed on the Hue at
-#   http://<hue_host>:<port>/dump_config
-
-
-###########################################################################
-# General configuration for core Desktop features (authentication, etc)
-###########################################################################
-
-[desktop]
-
-#  send_dbug_messages=1#
-#
-#  # To show database transactions, set database_logging to 1
-#  database_logging=0
-
-  # Set this to a random string, the longer the better.
-  # This is used for secure hashing in the session store.
-  secret_key=Geheim!
-
-  # Webserver listens on this address and port
-  http_host=<%= @hue_host %>
-  http_port=<%= @hue_port %>
-
-  # Time zone name
-  time_zone=<%= @hue_timezone %>
-
-  # Enable or disable Django debug mode.
-  django_debug_mode=false
-
-  # Enable or disable backtrace for server error
-  http_500_debug_mode=false
-
-  # Enable or disable memory profiling.
-  ## memory_profiler=false
-
-  # Server email for internal error messages
-  ## django_server_email='hue@localhost.localdomain'
-
-  # Email backend
-  ## django_email_backend=django.core.mail.backends.smtp.EmailBackend
-
-  # Webserver runs as this user
-  ## server_user=hue
-  ## server_group=hue
-
-  # This should be the Hue admin and proxy user
-  ## default_user=hue
-
-  # This should be the hadoop cluster admin
-  default_hdfs_superuser=<%= @default_hdfs_superuser %>
-
-  # If set to false, runcpserver will not actually start the web server.
-  # Used if Apache is being used as a WSGI container.
-  ## enable_server=yes
-
-  # Number of threads used by the CherryPy web server
-  ## cherrypy_server_threads=10
-
-  # Filename of SSL Certificate
-  <%if @huecert %>
-  ssl_certificate=<%= @huecert %>
-  <% end -%>
-
-  # Filename of SSL RSA Private Key
-  <%if @huekey %>
-  ssl_private_key=<%= @huekey %>
-  <% end -%>
-
-  # List of allowed and disallowed ciphers in cipher list format.
-  # See http://www.openssl.org/docs/apps/ciphers.html for more information on cipher list format.
-  ## ssl_cipher_list=DEFAULT:!aNULL:!eNULL:!LOW:!EXPORT:!SSLv2
-
-  # LDAP username and password of the hue user used for LDAP authentications.
-  # Set it to use LDAP Authentication with HiveServer2 and Impala.
-  ## ldap_username=hue
-  ## ldap_password=
-
-  # Default encoding for site data
-  ## default_site_encoding=utf-8
-
-  # Help improve Hue with anonymous usage analytics.
-  # Use Google Analytics to see how many times an application or specific section of an application is used, nothing more.
-  collect_usage=false
-
-  # Support for HTTPS termination at the load-balancer level with SECURE_PROXY_SSL_HEADER.
-  ## secure_proxy_ssl_header=false
-
-  # Comma-separated list of Django middleware classes to use.
-  # See https://docs.djangoproject.com/en/1.4/ref/middleware/ for more details on middlewares in Django.
-  ## middleware=desktop.auth.backend.LdapSynchronizationBackend
-
-  # Comma-separated list of regular expressions, which match the redirect URL.
-  # For example, to restrict to your local domain and FQDN, the following value can be used:
-  # ^\/.*$,^http:\/\/www.mydomain.com\/.*$
-  ## redirect_whitelist=
-
-  # Comma separated list of apps to not load at server startup.
-  # e.g.: pig,zookeeper
-  app_blacklist=<%= @app_blacklist %>
-
-  # The directory where to store the auditing logs. Auditing is disable if the value is empty.
-  # e.g. /var/log/hue/audit.log
-  ## audit_event_log_dir=
-
-  # Size in KB/MB/GB for audit log to rollover.
-  ## audit_log_max_file_size=100MB
-
-  # Administrators
-  # ----------------
-  [[django_admins]]
-    ## [[[admin1]]]
-    ## name=john
-    ## email=john@doe.com
-
-  # UI customizations
-  # -------------------
-  [[custom]]
-
-    # Top banner HTML code
-    # e.g. <H2>Test Lab A2 Hue Services</H2>
-    ## banner_top_html=
-
-  # Configuration options for user authentication into the web application
-  # ------------------------------------------------------------------------
-  [[auth]]
-
-    # Authentication backend. Common settings are:
-    # - django.contrib.auth.backends.ModelBackend (entirely Django backend)
-    # - desktop.auth.backend.AllowAllBackend (allows everyone)
-    # - desktop.auth.backend.AllowFirstUserDjangoBackend
-    #     (Default. Relies on Django and user manager, after the first login)
-    # - desktop.auth.backend.LdapBackend
-    # - desktop.auth.backend.PamBackend
-    # - desktop.auth.backend.SpnegoDjangoBackend
-    # - desktop.auth.backend.RemoteUserDjangoBackend
-    # - libsaml.backend.SAML2Backend
-    # - libopenid.backend.OpenIDBackend
-    # - liboauth.backend.OAuthBackend
-    #     (Support Twitter, Facebook, Google+ and Linkedin
-    backend=<%= @auth_backend %>
-
-    # The service to use when querying PAM.
-    #pam_service=login
-
-    # When using the desktop.auth.backend.RemoteUserDjangoBackend, this sets
-    # the normalized name of the header that contains the remote user.
-    # The HTTP header in the request is converted to a key by converting
-    # all characters to uppercase, replacing any hyphens with underscores
-    # and adding an HTTP_ prefix to the name. So, for example, if the header
-    # is called Remote-User that would be configured as HTTP_REMOTE_USER
-    #
-    # Defaults to HTTP_REMOTE_USER
-    ## remote_user_header=HTTP_REMOTE_USER
-
-    # Ignore the case of usernames when searching for existing users.
-    # Only supported in remoteUserDjangoBackend.
-    ## ignore_username_case=false
-
-    # Ignore the case of usernames when searching for existing users to authenticate with.
-    # Only supported in remoteUserDjangoBackend.
-    ## force_username_lowercase=false
-
-    # Users will expire after they have not logged in for 'n' amount of seconds.
-    # A negative number means that users will never expire.
-    ## expires_after=-1
-
-    # Apply 'expires_after' to superusers.
-    ## expire_superusers=true
-
-  # Configuration options for connecting to LDAP and Active Directory
-  # -------------------------------------------------------------------
-  [[ldap]]
-
-<% if @ldap_url %>
-    # The search base for finding users and groups
-    base_dn="<%= @base_dn %>"
-
-<%     if @nt_domain -%>
-    # The NT domain to connect to (only for use with Active Directory)
-    nt_domain=<%= @nt_domain %>
-<%     end -%>
-
-    # URL of the LDAP server
-    ldap_url=<%= @ldap_url %> 
-
-    # A PEM-format file containing certificates for the CA's that
-    # Hue will trust for authentication over TLS.
-    # The certificate for the CA that signed the
-    # LDAP server certificate must be included among these certificates.
-    # See more here http://www.openldap.org/doc/admin24/tls.html.
-<%     if @ldap_cert -%>
-    ldap_cert=<%= @ldap_cert %>
-<%     end -%>
-    use_start_tls=<%= @use_start_tls %>
-
-    # Distinguished name of the user to bind as -- not necessary if the LDAP server
-    # supports anonymous searches
-<%     if @bind_dn -%>
-    # Distinguished name of the user to bind as -- not necessary if the LDAP server
-    # supports anonymous searches
-    bind_dn="<%= @bind_dn %>"
-
-    # Password of the bind user -- not necessary if the LDAP server supports
-    # anonymous searches
-    bind_password=<%= @bind_password %>
-<%     end -%>
-
-<%     if @user_filter -%>
-    # Use search bind authentication.
-    search_bind_authentication=true
-<%     else -%>
-    # Pattern for searching for usernames -- Use <username> for the parameter
-    # For use when using LdapBackend for Hue authentication
-
-<%     if @use_ldap_username_pattern -%>
-    # for example, ldap_username_pattern=uid=<username>,ou=People,dc=mycompany,dc=com
-    ldap_username_pattern="<%= @ldap_username_pattern %>"
-<%     end -%> 
- 
-    search_bind_authentication=false
-<%     end -%>
-    # Execute this script to produce the bind user password. This will be used
-    # when `bind_password` is not set.
-    ## bind_password_script=
-
-    # Create users in Hue when they try to login with their LDAP credentials
-    # For use when using LdapBackend for Hue authentication
-    create_users_on_login = true
-
-    # Synchronize a users groups when they login
-    ## sync_groups_on_login=false
-
-    # Ignore the case of usernames when searching for existing users in Hue.
-    ignore_username_case=true
-
-    # Force usernames to lowercase when creating new users from LDAP.
-    force_username_lowercase=<%= @force_username_lowercase %>
-
-    # Choose which kind of subgrouping to use: nested or suboordinate (deprecated).
-    ## subgroups=suboordinate
-
-    # Define the number of levels to search for nested members.
-    ## nested_members_search_depth=10
-
-    # Whether or not to follow referrals
-    ## follow_referrals=false
-
-    # Enable python-ldap debugging.
-    ## debug=false
-
-    # Sets the debug level within the underlying LDAP C lib.
-    ## debug_level=255
-
-    # Possible values for trace_level are 0 for no logging, 1 for only logging the method calls with arguments,
-    # 2 for logging the method calls with arguments and the complete results and 9 for also logging the traceback of method calls.
-    ## trace_level=0
-
-    [[[users]]]
-<%     if @user_filter -%>
-      # Base filter for searching for users
-      user_filter="<%= @user_filter %>"
-<%     end -%>
-<%     if @user_name_attr -%>
-      # The username attribute in the LDAP schema
-      user_name_attr=<%= @user_name_attr %>
-<%     end -%>
-    [[[groups]]]
-
-      # Base filter for searching for groups
-<%     if @group_filter -%>
-      group_filter="<%= @group_filter_value %>"
-<%     end -%>
-
-      # The group name attribute in the LDAP schema
-      ## group_name_attr=cn
-
-      # The attribute of the group object which identifies the members of the group
-<%     if @group_member_attr -%>
-      group_member_attr=<%= @group_member_attr %>
-<%     end -%>
-<% end -%>
-
-  # Configuration options for specifying the Desktop Database. For more info,
-  # see http://docs.djangoproject.com/en/1.4/ref/settings/#database-engine
-  # ------------------------------------------------------------------------
-  [[database]]
-    engine=sqlite3
-    name=/var/lib/hue/desktop.db
-    # Database engine is typically one of:
-    # postgresql_psycopg2, mysql, sqlite3 or oracle.
-    #
-    # Note that for sqlite3, 'name', below is a path to the filename. For other backends, it is the database name.
-    # Note for Oracle, options={'threaded':true} must be set in order to avoid crashes.
-    # Note for Oracle, you can use the Oracle Service Name by setting "port=0" and then "name=<host>:<port>/<service_name>".
-    ## engine=sqlite3
-    ## host=
-    ## port=
-    ## user=
-    ## password=
-    ## name=desktop/desktop.db
-    ## options={}
-
-  # Configuration options for specifying the Desktop session.
-  # For more info, see https://docs.djangoproject.com/en/1.4/topics/http/sessions/
-  # ------------------------------------------------------------------------
-  [[session]]
-    # The cookie containing the users' session ID will expire after this amount of time in seconds.
-    # Default is 2 weeks.
-    ## ttl=1209600
-
-    # The cookie containing the users' session ID will be secure.
-    # Should only be enabled with HTTPS.
-    ## secure=false
-
-    # The cookie containing the users' session ID will use the HTTP only flag.
-    ## http_only=false
-
-    # Use session-length cookies. Logs out the user when she closes the browser window.
-    ## expire_at_browser_close=false
-
-
-  # Configuration options for connecting to an external SMTP server
-  # ------------------------------------------------------------------------
-  [[smtp]]
-
-    # The SMTP server information for email notification delivery
-    host=localhost
-    port=25
-    user=
-    password=
-
-    # Whether to use a TLS (secure) connection when talking to the SMTP server
-    tls=no
-
-    # Default email address to use for various automated notification from Hue
-    ## default_from_email=hue@localhost
-
-<% if @kerberos_realm != "" -%>
-  # Configuration options for Kerberos integration for secured Hadoop clusters
-  # ------------------------------------------------------------------------
-  [[kerberos]]
-
-    # Path to Hue's Kerberos keytab file
-    hue_keytab=/etc/hue.keytab
-    # Kerberos principal name for Hue
-    hue_principal=hue/<%= @fqdn %>@<%= @kerberos_realm %>
-    # Path to kinit
-    kinit_path=<%= (@operatingsystem == 'ubuntu' || @operatingsystem == 'Debian' || @operatingsystem == 'CentOS' ) ? '/usr/bin' : '/usr/kerberos/bin' %>/kinit
-
-<% end -%>
-  # Configuration options for using OAuthBackend (core) login
-  # ------------------------------------------------------------------------
-  [[oauth]]
-    # The Consumer key of the application
-    ## consumer_key=XXXXXXXXXXXXXXXXXXXXX
-
-    # The Consumer secret of the application
-    ## consumer_secret=XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
-
-    # The Request token URL
-    ## request_token_url=https://api.twitter.com/oauth/request_token
-
-    # The Access token URL
-    ## access_token_url=https://api.twitter.com/oauth/access_token
-
-    # The Authorize URL
-    ## authenticate_url=https://api.twitter.com/oauth/authorize
-
-
-###########################################################################
-# Settings to configure SAML
-###########################################################################
-
-[libsaml]
-  # Xmlsec1 binary path. This program should be executable by the user running Hue.
-  ## xmlsec_binary=/usr/local/bin/xmlsec1
-
-  # Entity ID for Hue acting as service provider.
-  # Can also accept a pattern where '<base_url>' will be replaced with server URL base.
-  ## entity_id="<base_url>/saml2/metadata/"
-
-  # Create users from SSO on login.
-  ## create_users_on_login=true
-
-  # Required attributes to ask for from IdP.
-  # This requires a comma separated list.
-  ## required_attributes=uid
-
-  # Optional attributes to ask for from IdP.
-  # This requires a comma separated list.
-  ## optional_attributes=
-
-  # IdP metadata in the form of a file. This is generally an XML file containing metadata that the Identity Provider generates.
-  ## metadata_file=
-
-  # Private key to encrypt metadata with.
-  ## key_file=
-
-  # Signed certificate to send along with encrypted metadata.
-  ## cert_file=
-
-  # A mapping from attributes in the response from the IdP to django user attributes.
-  ## user_attribute_mapping={'uid':'username'}
-
-  # Have Hue initiated authn requests be signed and provide a certificate.
-  ## authn_requests_signed=false
-
-  # Have Hue initiated logout requests be signed and provide a certificate.
-  ## logout_requests_signed=false
-
-  ## Username can be sourced from 'attributes' or 'nameid'.
-  ## username_source=attributes
-
-  # Performs the logout or not.
-  ## logout_enabled=true
-
-
-###########################################################################
-# Settings to configure OPENID
-###########################################################################
-
-[libopenid]
-  # (Required) OpenId SSO endpoint url.
-  ## server_endpoint_url=https://www.google.com/accounts/o8/id
-
-  # OpenId 1.1 identity url prefix to be used instead of SSO endpoint url
-  # This is only supported if you are using an OpenId 1.1 endpoint
-  ## identity_url_prefix=https://app.onelogin.com/openid/your_company.com/
-
-  # Create users from OPENID on login.
-  ## create_users_on_login=true
-
-  # Use email for username
-  ## use_email_for_username=true
-
-
-###########################################################################
-# Settings to configure OAuth
-###########################################################################
-
-[liboauth]
-  # NOTE:
-  # To work, each of the active (i.e. uncommented) service must have
-  # applications created on the social network.
-  # Then the "consumer key" and "consumer secret" must be provided here.
-  #
-  # The addresses where to do so are:
-  # Twitter:  https://dev.twitter.com/apps
-  # Google+ : https://cloud.google.com/
-  # Facebook: https://developers.facebook.com/apps
-  # Linkedin: https://www.linkedin.com/secure/developer
-  #
-  # Additionnaly, the following must be set in the application settings:
-  # Twitter:  Callback URL (aka Redirect URL) must be set to http://YOUR_HUE_IP_OR_DOMAIN_NAME/oauth/social_login/oauth_authenticated
-  # Google+ : CONSENT SCREEN must have email address
-  # Facebook: Sandbox Mode must be DISABLED
-  # Linkedin: "In OAuth User Agreement", r_emailaddress is REQUIRED
-
-  # The Consumer key of the application
-  ## consumer_key_twitter=
-  ## consumer_key_google=
-  ## consumer_key_facebook=
-  ## consumer_key_linkedin=
-
-  # The Consumer secret of the application
-  ## consumer_secret_twitter=
-  ## consumer_secret_google=
-  ## consumer_secret_facebook=
-  ## consumer_secret_linkedin=
-
-  # The Request token URL
-  ## request_token_url_twitter=https://api.twitter.com/oauth/request_token
-  ## request_token_url_google=https://accounts.google.com/o/oauth2/auth
-  ## request_token_url_linkedin=https://www.linkedin.com/uas/oauth2/authorization
-  ## request_token_url_facebook=https://graph.facebook.com/oauth/authorize
-
-  # The Access token URL
-  ## access_token_url_twitter=https://api.twitter.com/oauth/access_token
-  ## access_token_url_google=https://accounts.google.com/o/oauth2/token
-  ## access_token_url_facebook=https://graph.facebook.com/oauth/access_token
-  ## access_token_url_linkedin=https://api.linkedin.com/uas/oauth2/accessToken
-
-  # The Authenticate URL
-  ## authenticate_url_twitter=https://api.twitter.com/oauth/authorize
-  ## authenticate_url_google=https://www.googleapis.com/oauth2/v1/userinfo?access_token=
-  ## authenticate_url_facebook=https://graph.facebook.com/me?access_token=
-  ## authenticate_url_linkedin=https://api.linkedin.com/v1/people/~:(email-address)?format=json&oauth2_access_token=
-
-  # Username Map. Json Hash format.
-  # Replaces username parts in order to simplify usernames obtained
-  # Example: {"@sub1.domain.com":"_S1", "@sub2.domain.com":"_S2"}
-  # converts 'email@sub1.domain.com' to 'email_S1'
-  ## username_map={}
-
-  # Whitelisted domains (only applies to Google OAuth). CSV format.
-  ## whitelisted_domains_google=
-
-###########################################################################
-# Settings for the RDBMS application
-###########################################################################
-
-[librdbms]
-  # The RDBMS app can have any number of databases configured in the databases
-  # section. A database is known by its section name
-  # (IE sqlite, mysql, psql, and oracle in the list below).
-
-  [[databases]]
-    # sqlite configuration.
-    ## [[[sqlite]]]
-      # Name to show in the UI.
-      ## nice_name=SQLite
-
-      # For SQLite, name defines the path to the database.
-      ## name=/tmp/sqlite.db
-
-      # Database backend to use.
-      ## engine=sqlite
-
-      # Database options to send to the server when connecting.
-      # https://docs.djangoproject.com/en/1.4/ref/databases/
-      ## options={}
-
-    # mysql, oracle, or postgresql configuration.
-    ## [[[mysql]]]
-      # Name to show in the UI.
-      ## nice_name="My SQL DB"
-
-      # For MySQL and PostgreSQL, name is the name of the database.
-      # For Oracle, Name is instance of the Oracle server. For express edition
-      # this is 'xe' by default.
-      ## name=mysqldb
-
-      # Database backend to use. This can be:
-      # 1. mysql
-      # 2. postgresql
-      # 3. oracle
-      ## engine=mysql
-
-      # IP or hostname of the database to connect to.
-      ## host=localhost
-
-      # Port the database server is listening to. Defaults are:
-      # 1. MySQL: 3306
-      # 2. PostgreSQL: 5432
-      # 3. Oracle Express Edition: 1521
-      ## port=3306
-
-      # Username to authenticate with when connecting to the database.
-      ## user=example
-
-      # Password matching the username to authenticate with when
-      # connecting to the database.
-      ## password=example
-
-      # Database options to send to the server when connecting.
-      # https://docs.djangoproject.com/en/1.4/ref/databases/
-      ## options={}
-
-###########################################################################
-# Settings to configure your Hadoop cluster.
-###########################################################################
-
-[hadoop]
-
-  # Configuration for HDFS NameNode
-  # ------------------------------------------------------------------------
-  [[hdfs_clusters]]
-    # HA support by using HttpFs
-
-    [[[default]]]
-      # Enter the filesystem uri
-      fs_defaultfs=<%= @default_fs %>
-
-      # NameNode logical name.
-      ## logical_name=
-
-      # Use WebHdfs/HttpFs as the communication mechanism.
-      # Domain should be the NameNode or HttpFs host.
-      # Default port is 14000 for HttpFs.
-      webhdfs_url=<%= @webhdfs_url %>
-
-      # Change this if your HDFS cluster is Kerberos-secured
-      security_enabled=<%= if (@kerberos_realm != "") ; "true" else "false" end %>
-
-      # Default umask for file and directory creation, specified in an octal value.
-      ## umask=022
-
-  # Configuration for YARN (MR2)
-  # ------------------------------------------------------------------------
-  [[yarn_clusters]]
-<% resourcemanager_hosts = Array(@rm_host) -%>
-<% resourcemanager_hosts.each do |host| -%>
-  [[[<%= host %>]]]
-      # Enter the host on which you are running the ResourceManager
-      resourcemanager_host=<%= host %>
-
-      # The port where the ResourceManager IPC listens on
-      # resourcemanager_port=<% rm_port %>
-
-      # Whether to submit jobs to this cluster
-      submit_to=True
-<%     if @rm_logical_name -%>
-      # Resource Manager logical name (required for HA)
-      logical_name=<%= @rm_logical_name %>
-<%     else -%>
-      # Resource Manager logical name (required for HA)
-      # logical_name=
-<%     end -%>
-
-      # Change this if your YARN cluster is Kerberos-secured
-      security_enabled=<%= if (@kerberos_realm != "") ; "true" else "false" end %>
-
-      # URL of the ResourceManager API
-      resourcemanager_api_url=http://<%= host %>:<%= @rm_api_port %>
-
-      # URL of the ProxyServer API
-      proxy_api_url=<%= @rm_proxy_url %>
-
-      # URL of the HistoryServer API
-      history_server_api_url=<%= @history_server_url %>
-
-      # URL of the NodeManager API
-      node_manager_api_url=http://localhost:8042
-<% end -%>
-
-  # Configuration for MapReduce (MR1)
-  # ------------------------------------------------------------------------
-
-###########################################################################
-# Settings to configure the Filebrowser app
-###########################################################################
-
-[filebrowser]
-  # Location on local filesystem where the uploaded archives are temporary stored.
-  ## archive_upload_tempdir=/tmp
-
-###########################################################################
-# Settings to configure liboozie
-###########################################################################
-
-[liboozie]
-  # The URL where the Oozie service runs on. This is required in order for
-  # users to submit jobs.
-  oozie_url=<%= @oozie_url %>
-
-  security_enabled=<%= if (@kerberos_realm != "") ; "true" else "false" end %>
-
-  # Location on HDFS where the workflows/coordinator are deployed when submitted.
-  remote_deployement_dir=<%= @remote_deployement_dir %>
-
-
-###########################################################################
-# Settings to configure the Oozie app
-###########################################################################
-
-[oozie]
-  # Location on local FS where the examples are stored.
-  ## local_data_dir=..../examples
-
-  # Location on local FS where the data for the examples is stored.
-  ## sample_data_dir=...thirdparty/sample_data
-
-  # Location on HDFS where the oozie examples and workflows are stored.
-  ## remote_data_dir=/user/hue/oozie/workspaces
-
-  # Maximum of Oozie workflows or coodinators to retrieve in one API call.
-  ## oozie_jobs_count=100
-
-  # Use Cron format for defining the frequency of a Coordinator instead of the old frequency number/unit.
-  ## enable_cron_scheduling=true
-
-
-###########################################################################
-# Settings to configure Beeswax with Hive
-###########################################################################
-
-[beeswax]
-
-<% if @hive_host -%>
-  # Host where HiveServer2 is running.
-  # If Kerberos security is enabled, use fully-qualified domain name (FQDN).
-  hive_server_host=<%= @hive_host %>
-
-  # Port where HiveServer2 Thrift server runs on.
-  hive_server_port=<%= @hive_port %>
-
-  # Hive configuration directory, where hive-site.xml is located
-  hive_conf_dir=/etc/hive/conf
-
-  # Timeout in seconds for thrift calls to Hive service
-  ## server_conn_timeout=120
-
-  # Choose whether Hue uses the GetLog() thrift call to retrieve Hive logs.
-  # If false, Hue will use the FetchResults() thrift call instead.
-  use_get_log_api=false
-
-  # Set a LIMIT clause when browsing a partitioned table.
-  # A positive value will be set as the LIMIT. If 0 or negative, do not set any limit.
-  ## browse_partitioned_table_limit=250
-
-  # A limit to the number of rows that can be downloaded from a query.
-  # A value of -1 means there will be no limit.
-  # A maximum of 65,000 is applied to XLS downloads.
-  ## download_row_limit=1000000
-
-  # Hue will try to close the Hive query when the user leaves the editor page.
-  # This will free all the query resources in HiveServer2, but also make its results inaccessible.
-  ## close_queries=false
-
-  # Thrift version to use when communicating with HiveServer2
-  ## thrift_version=5
-
-  [[ssl]]
-    # SSL communication enabled for this server.
-    ## enabled=false
-
-    # Path to Certificate Authority certificates.
-    ## cacerts=/etc/hue/cacerts.pem
-
-    # Path to the private key file.
-    ## key=/etc/hue/key.pem
-
-    # Path to the public certificate file.
-    ## cert=/etc/hue/cert.pem
-
-    # Choose whether Hue should validate certificates received from the server.
-    ## validate=true
-<% end -%>
-
-###########################################################################
-# Settings to configure Pig
-###########################################################################
-
-[pig]
-  # Location of piggybank.jar on local filesystem.
-  ## local_sample_dir=/usr/share/hue/apps/pig/examples
-
-  # Location piggybank.jar will be copied to in HDFS.
-  ## remote_data_dir=/user/hue/pig/examples
-
-
-###########################################################################
-# Settings to configure Sqoop
-###########################################################################
-
-[sqoop]
-  # For autocompletion, fill out the librdbms section.
-
-  # Sqoop server URL
-<% if @sqoop2_url != "" -%>
-  server_url=<%= @sqoop2_url %>
-<% end -%>
-
-###########################################################################
-# Settings to configure Proxy
-###########################################################################
-
-[proxy]
-  # Comma-separated list of regular expressions,
-  # which match 'host:port' of requested proxy target.
-  ## whitelist=(localhost|127\.0\.0\.1):(50030|50070|50060|50075)
-
-  # Comma-separated list of regular expressions,
-  # which match any prefix of 'host:port/path' of requested proxy target.
-  # This does not support matching GET parameters.
-  ## blacklist=
-
-
-###########################################################################
-# Settings to configure Impala
-###########################################################################
-
-[impala]
-  # Host of the Impala Server (one of the Impalad)
-  ## server_host=localhost
-
-  # Port of the Impala Server
-  ## server_port=21050
-
-  # Kerberos principal
-  ## impala_principal=impala/hostname.foo.com
-
-  # Turn on/off impersonation mechanism when talking to Impala
-  ## impersonation_enabled=False
-
-  # Number of initial rows of a result set to ask Impala to cache in order
-  # to support re-fetching them for downloading them.
-  # Set to 0 for disabling the option and backward compatibility.
-  ## querycache_rows=50000
-
-  # Timeout in seconds for thrift calls
-  ## server_conn_timeout=120
-
-  # Hue will try to close the Impala query when the user leaves the editor page.
-  # This will free all the query resources in Impala, but also make its results inaccessible.
-  ## close_queries=true
-
-  # If QUERY_TIMEOUT_S > 0, the query will be timed out (i.e. cancelled) if Impala does not do any work
-  # (compute or send back results) for that query within QUERY_TIMEOUT_S seconds.
-  ## query_timeout_s=600
-
-
-###########################################################################
-# Settings to configure HBase Browser
-###########################################################################
-
-[hbase]
-  # Comma-separated list of HBase Thrift servers for clusters in the format of '(name|host:port)'.
-  # Use full hostname with security.
-<% if @hbase_thrift_url != "" -%>
-  hbase_clusters=(Bigtop|<%= @hbase_thrift_url %>)
-<% else -%>
-  ## hbase_clusters=(Cluster|localhost:9090)
-<% end -%>
-  # HBase configuration directory, where hbase-site.xml is located.
-  ## hbase_conf_dir=/etc/hbase/conf
-
-  # Hard limit of rows or columns per row fetched before truncating.
-  ## truncate_limit = 500
-
-  # 'buffered' is the default of the HBase Thrift Server and supports security.
-  # 'framed' can be used to chunk up responses,
-  # which is useful when used in conjunction with the nonblocking server in Thrift.
-  ## thrift_transport=buffered
-
-
-###########################################################################
-# Settings to configure Solr Search
-###########################################################################
-
-[search]
-
-  # URL of the Solr Server
-<% if @solr_url != "" -%>
-  solr_url=<%= @solr_url %>
-<% end -%>
-  # Requires FQDN in solr_url if enabled
-  ## security_enabled=false
-
-  ## Query sent when no term is entered
-  ## empty_query=*:*
-
-
-###########################################################################
-# Settings to configure Solr Indexer
-###########################################################################
-
-[indexer]
-
-  # Location of the solrctl binary.
-  ## solrctl_path=/usr/bin/solrctl
-
-  # Location of the solr home.
-  ## solr_home=/usr/lib/solr
-
-  # Zookeeper ensemble.
-  ## solr_zk_ensemble=localhost:2181/solr
-
-  # The contents of this directory will be copied over to the solrctl host to its temporary directory.
-  ## config_template_path=/../hue/desktop/libs/indexer/src/data/solr_configs
-
-
-###########################################################################
-# Settings to configure Job Designer
-###########################################################################
-
-[jobsub]
-
-  # Location on local FS where examples and template are stored.
-  ## local_data_dir=..../data
-
-  # Location on local FS where sample data is stored
-  ## sample_data_dir=...thirdparty/sample_data
-
-
-###########################################################################
-# Settings to configure Job Browser.
-###########################################################################
-
-[jobbrowser]
-  # Share submitted jobs information with all users. If set to false,
-  # submitted jobs are visible only to the owner and administrators.
-  ## share_jobs=true
-
-
-###########################################################################
-# Settings to configure the Zookeeper application.
-###########################################################################
-
-[zookeeper]
-
-  [[clusters]]
-
-    [[[default]]]
-      # Zookeeper ensemble. Comma separated list of Host/Port.
-      # e.g. localhost:2181,localhost:2182,localhost:2183
-      host_ports=<%= @zookeeper_host_port %>
-
-      # The URL of the REST contrib service (required for znode browsing)
-      rest_url=http://localhost:9998
-
-
-###########################################################################
-# Settings to configure the Spark application.
-###########################################################################
-
-[spark]
-  # URL of the REST Spark Job Server.
-  ## server_url=http://localhost:8090/
-
-
-###########################################################################
-# Settings for the User Admin application
-###########################################################################
-
-[useradmin]
-  # The name of the default user group that users will be a member of
-  ## default_user_group=default
-
-
-###########################################################################
-# Settings for the Sentry lib
-###########################################################################
-
-[libsentry]
-  # Hostname or IP of server.
-  ## hostname=localhost
-
-  # Port the sentry service is running on.
-  ## port=8038
-
-  # Sentry configuration directory, where sentry-site.xml is located.
-  ## sentry_conf_dir=/etc/sentry/conf

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-deploy/puppet/modules/hue/tests/init.pp
----------------------------------------------------------------------
diff --git a/bigtop-deploy/puppet/modules/hue/tests/init.pp b/bigtop-deploy/puppet/modules/hue/tests/init.pp
deleted file mode 100644
index 38fa1a0..0000000
--- a/bigtop-deploy/puppet/modules/hue/tests/init.pp
+++ /dev/null
@@ -1,24 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-hue::server { "test-hue-server": 
-  sqoop2_url   => "http://localhost:12000/sqoop",
-  solr_url    => "http://localhost:8983/solr/",
-  hbase_thrift_url => "localhost:9090",
-  webhdfs_url => "http://localhost:14000/webhdfs/v1",
-  rm_host     => "localhost",
-  rm_port     => "8032",
-  oozie_url   => "http://localhost:11000/oozie",
-}

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/common/hadoop/install_hadoop.sh
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hadoop/install_hadoop.sh b/bigtop-packages/src/common/hadoop/install_hadoop.sh
index f8aef9b..9957dcf 100755
--- a/bigtop-packages/src/common/hadoop/install_hadoop.sh
+++ b/bigtop-packages/src/common/hadoop/install_hadoop.sh
@@ -342,16 +342,6 @@ rm ${HTTP_DIRECTORY}/conf/ssl-server.xml
 
 mv $HADOOP_ETC_DIR/conf.empty/httpfs* $HTTPFS_ETC_DIR/conf.empty
 sed -i -e '/<\/configuration>/i\
-  <!-- HUE proxy user setting -->\
-  <property>\
-    <name>httpfs.proxyuser.hue.hosts</name>\
-    <value>*</value>\
-  </property>\
-  <property>\
-    <name>httpfs.proxyuser.hue.groups</name>\
-    <value>*</value>\
-  </property>\
-\
   <property>\
     <name>httpfs.hadoop.config.dir</name>\
     <value>/etc/hadoop/conf</value>\

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/common/hue/do-component-build
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hue/do-component-build b/bigtop-packages/src/common/hue/do-component-build
deleted file mode 100644
index 3487d4a..0000000
--- a/bigtop-packages/src/common/hue/do-component-build
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -ex
-. `dirname $0`/bigtop.bom
-
-
-rm -rf desktop/core/ext-py/pyopenssl desktop/core/ext-py/cryptography-1.3.1
-wget https://github.com/pyca/cryptography/archive/1.8.2.tar.gz
-tar -xz -C desktop/core/ext-py/ -f 1.8.2.tar.gz
-rm -f 1.8.2.tar.gz
-wget https://pypi.python.org/packages/3b/15/a5d90ab1a41075e8f0fae334f13452549528f82142b3b9d0c9d86ab7178c/pyOpenSSL-17.5.0.tar.gz
-tar -xz -C desktop/core/ext-py/ -f pyOpenSSL-17.5.0.tar.gz
-rm -f pyOpenSSL-17.5.0.tar.gz
-wget https://pypi.python.org/packages/c6/70/bb32913de251017e266c5114d0a645f262fb10ebc9bf6de894966d124e35/packaging-16.8.tar.gz
-tar -xz -C desktop/core/ext-py/ -f packaging-16.8.tar.gz
-rm -f packaging-16.8.tar.gz
-
-#FIXME: this needs to be fixed upstream
-sed -i -e "s#${FULL_VERSION}-SNAPSHOT#${FULL_VERSION}#g" `grep -lR ${FULL_VERSION}-SNAPSHOT .`
-export PATH=$PATH:/usr/lib/mit/bin
-
-make MAVEN_VERSION=${HUE_VERSION} apps docs locales
-
-# Make the entire tree relocatable
-bash tools/relocatable.sh
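
The build prep in the deleted do-component-build boils down to one repeated
step: drop the bundled copy of a Python dependency and vendor a pinned
upstream tarball into desktop/core/ext-py before running make. A compact
sketch of that pattern (vendor_ext_py is a hypothetical helper written for
illustration, not part of the deleted script):

  # Hypothetical helper illustrating the vendoring pattern above.
  vendor_ext_py() {
    local url=$1
    local tarball=${url##*/}          # e.g. 1.8.2.tar.gz
    wget "$url"
    tar -xz -C desktop/core/ext-py/ -f "$tarball"
    rm -f "$tarball"
  }

  # Same dependency the deleted script pinned:
  vendor_ext_py https://github.com/pyca/cryptography/archive/1.8.2.tar.gz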

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/common/hue/install_hue.sh
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/hue/install_hue.sh b/bigtop-packages/src/common/hue/install_hue.sh
deleted file mode 100644
index 2fe6bab..0000000
--- a/bigtop-packages/src/common/hue/install_hue.sh
+++ /dev/null
@@ -1,204 +0,0 @@
-#!/bin/bash
-
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-usage() {
-  echo "
-usage: $0 <options>
-  Required not-so-options:
-     --build-dir=DIR             path to Hue dist.dir
-     --prefix=PREFIX             path to install into
-
-  Optional options:
-     --doc-dir=DIR               path to install docs into [/usr/share/doc/hue]
-     --lib-dir=DIR               path to install Hue home [/usr/lib/hue]
-     --installed-lib-dir=DIR     path where lib-dir will end up on target system
-     --bin-dir=DIR               path to install bins [/usr/bin]
-     ... [ see source for more similar options ]
-  "
-  exit 1
-}
-
-OPTS=$(getopt \
-  -n $0 \
-  -o '' \
-  -l 'prefix:' \
-  -l 'doc-dir:' \
-  -l 'lib-dir:' \
-  -l 'installed-lib-dir:' \
-  -l 'bin-dir:' \
-  -l 'build-dir:' -- "$@")
-
-if [ $? != 0 ] ; then
-    usage
-fi
-
-eval set -- "$OPTS"
-while true ; do
-    case "$1" in
-        --prefix)
-        PREFIX=$2 ; shift 2
-        ;;
-        --build-dir)
-        BUILD_DIR=$2 ; shift 2
-        ;;
-        --doc-dir)
-        DOC_DIR=$2 ; shift 2
-        ;;
-        --lib-dir)
-        LIB_DIR=$2 ; shift 2
-        ;;
-        --installed-lib-dir)
-        INSTALLED_LIB_DIR=$2 ; shift 2
-        ;;
-        --bin-dir)
-        BIN_DIR=$2 ; shift 2
-        ;;
-        --)
-        shift ; break
-        ;;
-        *)
-        echo "Unknown option: $1"
-        usage
-        exit 1
-        ;;
-    esac
-done
-
-for var in PREFIX BUILD_DIR ; do
-  if [ -z "$(eval "echo \$$var")" ]; then
-    echo Missing param: $var
-    usage
-  fi
-done
-
-PREFIX=`echo $PREFIX | sed -e 's#/*$##'`
-BUILD_DIR=`echo $BUILD_DIR | sed -e 's#/*$##'`
-
-DOC_DIR=${DOC_DIR:-/usr/share/doc/hue}
-CONF_DIR=${CONF_DIR:-/etc/hue}
-LIB_DIR=${LIB_DIR:-/usr/lib/hue}
-VAR_DIR=${VAR_DIR:-/var/lib/hue}
-LOG_DIR=${LOG_DIR:-/var/log/hue}
-HADOOP_DIR=${HADOOP_DIR:-/usr/lib/hadoop/lib}
-
-BUNDLED_BUILD_DIR=$PREFIX/$LIB_DIR/build
-
-# Install all the files 
-(cd $BUILD_DIR ; PREFIX=`dirname $PREFIX/$LIB_DIR` MAVEN_OPTIONS="-Dmaven.repo.local=${PWD}/.m2/repository" make install MAVEN_VERSION='$(DESKTOP_VERSION)')
-
-# Install plugins
-install -d -m 0755 $PREFIX/$HADOOP_DIR
-ln -fs $LIB_DIR/desktop/libs/hadoop/java-lib/*plugin*jar $PREFIX/$HADOOP_DIR
-
-# Making the resulting tree relocatable
-# WARNING: We HAVE to run this twice, before and after the apps get registered.
-#          we have to run it one time before so that the path to the interpreter
-#          inside of $PREFIX/$LIB_DIR/build/env/bin/hue gets relativized. If we
-#          don't relativize it we run into a risk of breaking the build when the
-#          length of the path to the interpreter ends up being longer than 80
-#          character (which is the limit for #!)
-(cd $PREFIX/$LIB_DIR ; bash tools/relocatable.sh)
-
-# remove RECORD files since it contains "real" paths confusing rpmbuild
-(cd $PREFIX/$LIB_DIR ; rm -f build/env/lib/python*/site-packages/*.dist-info/RECORD)
-(cd $PREFIX/$LIB_DIR ; rm -f build/env/lib/python*/dist-packages/*.dist-info/RECORD)
-
-# Remove Hue database and then recreate it, but with just the "right" apps
-rm -f $PREFIX/$LIB_DIR/desktop/desktop.db $PREFIX/$LIB_DIR/app.reg
-APPS="about filebrowser help proxy useradmin jobbrowser jobsub oozie metastore"
-export DESKTOP_LOG_DIR=$BUILD_DIR
-export DESKTOP_LOGLEVEL=WARN
-export ROOT=$PREFIX/$LIB_DIR
-for app in $APPS ; do
-  (cd $PREFIX/$LIB_DIR ; ./build/env/bin/python tools/app_reg/app_reg.py --install apps/$app)
-done
-find $PREFIX/$LIB_DIR -iname \*.py[co]  -exec rm -f {} \;
-
-# Making the resulting tree relocatable for the second time
-(cd $PREFIX/$LIB_DIR ; bash tools/relocatable.sh)
-
-# Install conf files
-install -d -m 0755 $PREFIX/$CONF_DIR
-cp -r ${BUILD_DIR}/desktop/conf.dist $PREFIX/${CONF_DIR}/conf.empty
-rm -rf $PREFIX/$LIB_DIR/desktop/conf
-ln -fs $CONF_DIR/conf $PREFIX/$LIB_DIR/desktop/conf
-sed -i -e '/\[\[database\]\]/a\
-    engine=sqlite3\
-    name=/var/lib/hue/desktop.db' $PREFIX/${CONF_DIR}/conf.empty/hue.ini
-sed -i -e '/\[\[yarn_clusters\]\]/,+20s@## submit_to=False@submit_to=True@' \
-    $PREFIX/${CONF_DIR}/conf.empty/hue.ini
-
-# Relink logs subdirectory just in case
-install -d -m 0755 $PREFIX/$LOG_DIR
-rm -rf $PREFIX/$LIB_DIR/desktop/logs
-ln -s $LOG_DIR $PREFIX/$LIB_DIR/desktop/logs
-# remove the logs in build progress
-rm -rf $PREFIX/$LIB_DIR/apps/logs/*
-
-# Make binary scripts executables
-chmod 755 $BUNDLED_BUILD_DIR/env/bin/*
-
-# Preparing filtering command
-SED_FILT="-e s|$PREFIX|| -e s|$BUILD_DIR|$LIB_DIR|"
-
-# Fix broken symlinks
-for sm in $BUNDLED_BUILD_DIR/env/lib*; do
-  if [ -h ${sm} ] ; then
-    SM_ORIG_DEST_FILE=`ls -l "${sm}" | sed -e 's/.*-> //' `
-    SM_DEST_FILE=`echo $SM_ORIG_DEST_FILE | sed $SED_FILT`
-
-    rm ${sm}
-    ln -s ${SM_DEST_FILE} ${sm}
-  fi
-done
-
-# Fix broken python scripts
-ALL_PTH_BORKED=`find $PREFIX -iname "*.pth"`
-ALL_REG_BORKED=`find $PREFIX -iname "app.reg"`
-ALL_PYTHON_BORKED=`find $PREFIX -iname "*.egg-link"`
-HUE_BIN_SCRIPTS=$BUNDLED_BUILD_DIR/env/bin/*
-HUE_EGG_SCRIPTS=$BUNDLED_BUILD_DIR/env/lib*/python*/site-packages/*/EGG-INFO/scripts/*
-for file in $HUE_BIN_SCRIPTS $HUE_EGG_SCRIPTS $ALL_PTH_BORKED $ALL_REG_BORKED $ALL_PYTHON_BORKED ;
-do
-  if [ -f ${file} ]
-  then
-    sed -i $SED_FILT ${file}
-  fi
-done
-
-# Remove bogus files
-rm -fv `find $PREFIX -iname "build_log.txt"`
-
-install -d ${PREFIX}/${DOC_DIR}
-cp -r ${BUILD_DIR}/build/docs/* ${PREFIX}/${DOC_DIR}/
-
-# FIXME: for Hue 3.0 the following section would need to go away (hence it is kept at the bottom)
-
-# Move desktop.db to a var location
-install -d -m 0755 $PREFIX/$VAR_DIR
-mv $PREFIX/$LIB_DIR/desktop/desktop.db $PREFIX/$VAR_DIR
-
-# Move hue.pth to a var location
-mv $PREFIX/$LIB_DIR/build/env/lib/python*/site-packages/hue.pth $PREFIX/$VAR_DIR
-ln -s $VAR_DIR/hue.pth `ls -d $PREFIX/$LIB_DIR/build/env/lib/python*/site-packages/`/hue.pth
-
-# Move app.reg to a var location
-mv $PREFIX/$LIB_DIR/app.reg $PREFIX/$VAR_DIR
-ln -s $VAR_DIR/app.reg $PREFIX/$LIB_DIR/app.reg
-sed -i -e '/HUE_APP_REG_DIR/s#INSTALL_ROOT#"/var/lib/hue/"#' $PREFIX/$LIB_DIR/tools/app_reg/common.py
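
A large part of the deleted install_hue.sh above is path surgery: the
generated .pth, app.reg and .egg-link files still carry the packaging prefix
and the build directory, and both have to be rewritten to the final install
locations. A stripped-down sketch of that rewrite step, with placeholder
paths (the variable names follow the deleted script):

  # Sketch with placeholder paths; assumes no whitespace in file names,
  # just like the original script.
  PREFIX=/tmp/hue-staging
  BUILD_DIR=/tmp/hue-build
  LIB_DIR=/usr/lib/hue
  SED_FILT="-e s|$PREFIX|| -e s|$BUILD_DIR|$LIB_DIR|"
  for file in $(find "$PREFIX" -iname '*.pth' -o -iname 'app.reg' -o -iname '*.egg-link'); do
    [ -f "$file" ] && sed -i $SED_FILT "$file"
  done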

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/changelog
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/changelog b/bigtop-packages/src/deb/hue/changelog
deleted file mode 100644
index 547ed02..0000000
--- a/bigtop-packages/src/deb/hue/changelog
+++ /dev/null
@@ -1 +0,0 @@
---- This is auto-generated 

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/compat
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/compat b/bigtop-packages/src/deb/hue/compat
deleted file mode 100644
index ec63514..0000000
--- a/bigtop-packages/src/deb/hue/compat
+++ /dev/null
@@ -1 +0,0 @@
-9

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/control
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/control b/bigtop-packages/src/deb/hue/control
deleted file mode 100644
index d8f33cf..0000000
--- a/bigtop-packages/src/deb/hue/control
+++ /dev/null
@@ -1,136 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-Source: hue
-Section: misc
-Priority: extra
-Maintainer: Apache Bigtop <de...@bigtop.apache.org>
-Build-Depends: debhelper (>= 7.0.50~), python2.6-dev | python2.7-dev, libxml2-dev, libxslt1-dev, zlib1g-dev, libsqlite3-dev, libldap2-dev, libsasl2-dev, libmysqlclient-dev | libmariadbclient-dev, make, python-setuptools, libkrb5-dev, libgmp3-dev, libffi-dev
-Standards-Version: 3.9.4
-Homepage: http://github.com/cloudera/hue
-
-Package: hue-common
-Architecture: any
-Depends: ${shlibs:Depends}, ${misc:Depends}, ${build:SYS-PYTHON}, libsasl2-modules-gssapi-mit, libxslt1.1, make, python (>= 2.6), libgmp10, libffi6
-Description: A browser-based desktop interface for Hadoop
- Hue is a browser-based desktop interface for interacting with Hadoop.
- It supports a file browser, job tracker interface, cluster health monitor, and more.
-
-Package: hue
-Architecture: all
-Depends: hue-common (= ${source:Version}), hue-server (= ${source:Version}), hue-beeswax (= ${source:Version}), hue-impala (= ${source:Version}), hue-pig (= ${source:Version}), hue-hbase (= ${source:Version}), hue-search (= ${source:Version}), hue-sqoop (= ${source:Version}), hue-rdbms (= ${source:Version}), hue-security (= ${source:Version}), hue-spark (= ${source:Version}), hue-zookeeper (= ${source:Version})
-Description: The hue metapackage
- Hue is a browser-based desktop interface for interacting with Hadoop.
- It supports a file browser, job tracker interface, cluster health monitor, and more.
-
-Package: hue-server
-Architecture: all
-Depends: ${misc:Depends}, hue-common (= ${source:Version})
-Description: Service Scripts for Hue
- This package provides the service scripts for Hue server.
-
-Package: hue-doc
-Architecture: all
-Description: Documentation for Hue
- This package provides the installation manual, user guide, SDK documentation, and release notes.
-
-Package: hue-beeswax
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Hive on Hue
- Beeswax is a web interface for Hive.
- .
- It allows users to construct and run queries on Hive, manage tables,
- and import and export data.
-
-Package: hue-impala
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Impala
- A web interface for Impala.
- .
- It allows users to construct and run Impala jobs.
-
-
-Package: hue-pig
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version}) 
-Description: A UI for Pig on Hue
- A web interface for Pig.
- .
- It allows users to construct and run Pig jobs.
-
-Package: hue-hbase
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for HBase on Hue
- A web interface for HBase.
- .
- It allows users to run HBase queries
-
-Package: hue-sqoop
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Sqoop on Hue
- A web interface to Sqoop.
-
-Package: hue-search
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Search on Hue
- A web interface to Search.
- .
- It allows users to interact with Solr
-
-Package: hue-rdbms
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for RDBMS on Hue
- A web interface to RDBMS.
- .
- It allows users to interact with RDBMS
-
-Package: hue-security
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Security on Hue
- A web interface to roles and security.
- .
- It allows users to interact with Hive roles
-
-Package: hue-spark
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Spark on Hue
- A web interface to Spark.
- .
- It allows users to interact with Spark
-
-Package: hue-useradmin
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for user administration on Hue
- A web interface for user administration.
- .
- It allows administration of Hue users.
-
-Package: hue-zookeeper
-Architecture: all
-Depends: ${misc:Depends}, python (>= 2.6), python (<< 3), make (>= 3.8), hue-common (= ${source:Version})
-Description: A UI for Zookeeper on Hue
- A web interface to Zookeeper.
- .
- It allows users to interact with Zookeeper
-

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/copyright
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/copyright b/bigtop-packages/src/deb/hue/copyright
deleted file mode 100644
index d645695..0000000
--- a/bigtop-packages/src/deb/hue/copyright
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/docs
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/docs b/bigtop-packages/src/deb/hue/docs
deleted file mode 100644
index b43bf86..0000000
--- a/bigtop-packages/src/deb/hue/docs
+++ /dev/null
@@ -1 +0,0 @@
-README.md

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/hue-app.postinst.tpl
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/hue-app.postinst.tpl b/bigtop-packages/src/deb/hue/hue-app.postinst.tpl
deleted file mode 100644
index 6c51b86..0000000
--- a/bigtop-packages/src/deb/hue/hue-app.postinst.tpl
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-# summary of how this script can be called:
-#        * <postinst> `configure' <most-recently-configured-version>
-#        * <old-postinst> `abort-upgrade' <new version>
-#        * <conflictor's-postinst> `abort-remove' `in-favour' <package>
-#          <new-version>
-#        * <postinst> `abort-remove'
-#        * <deconfigured's-postinst> `abort-deconfigure' `in-favour'
-#          <failed-install-package> <version> `removing'
-#          <conflicting-package> <version>
-# for details, see http://www.debian.org/doc/debian-policy/ or
-# the debian-policy package
-
-APP=@APP@
-export ROOT=/usr/lib/hue
-APP_DIR=$ROOT/apps/$APP
-export DESKTOP_LOGLEVEL=WARN
-export DESKTOP_LOG_DIR=/var/log/hue
-
-case "$1" in
-    configure)
-        (cd $ROOT; $ROOT/build/env/bin/python $ROOT/tools/app_reg/app_reg.py --remove $APP) ||:
-        (cd $ROOT; $ROOT/build/env/bin/python $ROOT/tools/app_reg/app_reg.py --install $APP_DIR)
-        chown -R hue:hue /var/log/hue /var/lib/hue
-    ;;
-
-    abort-upgrade|abort-remove|abort-deconfigure)
-    ;;
-
-    *)
-        echo "postinst called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-# dh_installdeb will replace this with shell code automatically
-# generated by other debhelper scripts.
-
-#DEBHELPER#
-
-exit 0

http://git-wip-us.apache.org/repos/asf/bigtop/blob/23582dc0/bigtop-packages/src/deb/hue/hue-app.prerm.tpl
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/deb/hue/hue-app.prerm.tpl b/bigtop-packages/src/deb/hue/hue-app.prerm.tpl
deleted file mode 100644
index a61c838..0000000
--- a/bigtop-packages/src/deb/hue/hue-app.prerm.tpl
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-set -e
-
-# summary of how this script can be called:
-#        * <prerm> `remove'
-#        * <old-prerm> `upgrade' <new-version>
-#        * <new-prerm> `failed-upgrade' <old-version>
-#        * <conflictor's-prerm> `remove' `in-favour' <package> <new-version>
-#        * <deconfigured's-prerm> `deconfigure' `in-favour'
-#          <package-being-installed> <version> `removing'
-#          <conflicting-package> <version>
-# for details, see http://www.debian.org/doc/debian-policy/ or
-# the debian-policy package
-
-APP=@APP@
-export ROOT=/usr/lib/hue
-APP_DIR=$ROOT/apps/$APP
-export DESKTOP_LOGLEVEL=WARN
-export DESKTOP_LOG_DIR=/var/log/hue
-env_python="$ROOT/build/env/bin/python"
-app_reg="$ROOT/tools/app_reg/app_reg.py"
-
-case "$1" in
-    remove|upgrade|deconfigure)
-        if test -e $app_reg -a -e $env_python ; then
-	    $env_python $app_reg --remove $APP ||:
-        fi
-        find $APP_DIR -name \*.py[co] -exec rm -f {} \; ||:
-        find $APP_DIR -name \*.egg-info -prune -exec rm -Rf {} \; ||:
-        chown -R hue:hue /var/log/hue /var/lib/hue || :
-    ;;
-
-    failed-upgrade)
-    ;;
-
-    *)
-        echo "prerm called with unknown argument \`$1'" >&2
-        exit 1
-    ;;
-esac
-
-# dh_installdeb will replace this with shell code automatically
-# generated by other debhelper scripts.
-
-#DEBHELPER#
-
-exit 0